diff --git a/Parallel_Computing/Julia/Example1/README.md b/Parallel_Computing/Julia/Example1/README.md
new file mode 100644
index 00000000..cd35b535
--- /dev/null
+++ b/Parallel_Computing/Julia/Example1/README.md
@@ -0,0 +1,21 @@
+This example demonstrates basic usage of MPI in Julia.
+
+### Contents
+- hello_world_mpi.jl: Julia source code
+- hello_world_mpi.sh: Slurm submission script
+
+### Example Usage:
+
+```bash
+sbatch hello_world_mpi.sh
+```
+
+### Example Output:
+
+```bash
+$ cat hello_world_mpi.out
+Hello world, I am rank 3 of 4
+Hello world, I am rank 2 of 4
+Hello world, I am rank 0 of 4
+Hello world, I am rank 1 of 4
+```
\ No newline at end of file
diff --git a/Parallel_Computing/Julia/Example1/hello_world_mpi.jl b/Parallel_Computing/Julia/Example1/hello_world_mpi.jl
new file mode 100644
index 00000000..3d8821df
--- /dev/null
+++ b/Parallel_Computing/Julia/Example1/hello_world_mpi.jl
@@ -0,0 +1,7 @@
+using MPI
+MPI.Init()
+
+comm = MPI.COMM_WORLD
+print("Hello world, I am rank $(MPI.Comm_rank(comm)) of $(MPI.Comm_size(comm))\n")
+MPI.Barrier(comm)
+MPI.Finalize()
\ No newline at end of file
diff --git a/Parallel_Computing/Julia/Example1/hello_world_mpi.sh b/Parallel_Computing/Julia/Example1/hello_world_mpi.sh
new file mode 100644
index 00000000..8d29bc00
--- /dev/null
+++ b/Parallel_Computing/Julia/Example1/hello_world_mpi.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+#SBATCH -J hello_world_mpi
+#SBATCH -p test # partition
+#SBATCH -n 4 # number of cores
+#SBATCH --mem-per-cpu=5GB
+#SBATCH -t 0-01:00 # time (D-HH:MM)
+#SBATCH -e hello_world_mpi.err
+#SBATCH -o hello_world_mpi.out
+
+export UCX_WARN_UNUSED_ENV_VARS=n
+module load gcc/12.2.0-fasrc01 openmpi/4.1.4-fasrc01
+
+julia --project=~/MPIenv -e 'using MPIPreferences; MPIPreferences.use_system_binary()'
+julia --project=~/MPIenv -e 'using Pkg; Pkg.build("MPI"; verbose=true)'
+mpiexec -n 4 julia --project=~/MPIenv hello_world_mpi.jl
\ No newline at end of file
diff --git a/Parallel_Computing/Julia/Example2/README.md b/Parallel_Computing/Julia/Example2/README.md
new file mode 100644
index 00000000..ea75dcd9
--- /dev/null
+++ b/Parallel_Computing/Julia/Example2/README.md
@@ -0,0 +1,18 @@
+This example demonstrates a Monte Carlo estimation of pi parallelized with MPI.
+
+### Contents
+- compute_pi.jl: Julia source code
+- compute_pi.sh: Slurm submission script
+
+### Example Usage:
+
+```bash
+sbatch compute_pi.sh
+```
+
+### Example Output:
+
+```bash
+$ cat compute_pi.out
+Estimate of pi is: 3.14167883
+```
\ No newline at end of file
diff --git a/Parallel_Computing/Julia/Example2/compute_pi.jl b/Parallel_Computing/Julia/Example2/compute_pi.jl
new file mode 100644
index 00000000..f46d0657
--- /dev/null
+++ b/Parallel_Computing/Julia/Example2/compute_pi.jl
@@ -0,0 +1,34 @@
+using MPI
+using LinearAlgebra
+
+MPI.Init()
+
+comm = MPI.COMM_WORLD
+rank = MPI.Comm_rank(comm)
+comm_size = MPI.Comm_size(comm)
+root = 0
+MPI.Barrier(comm)
+
+N = 1e8;
+function count_pi(N)
+ R = 1;
+ local_count = 0
+ for i in 1:N
+ coords = rand(2)
+ if norm(coords) < R
+ local_count = local_count + 1
+ end
+ end
+ return local_count
+end
+local_count = count_pi(N)
+
+MPI.Barrier(comm)
+total_count = MPI.Reduce(local_count, +, root, comm)
+
+if rank == root
+ print("\n Estimate of pi is: ")
+ print(total_count / N / comm_size * 4)
+end
+
+MPI.Finalize()
\ No newline at end of file
diff --git a/Parallel_Computing/Julia/Example2/compute_pi.sh b/Parallel_Computing/Julia/Example2/compute_pi.sh
new file mode 100644
index 00000000..17f937ef
--- /dev/null
+++ b/Parallel_Computing/Julia/Example2/compute_pi.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+#SBATCH -J compute_pi
+#SBATCH -p test # partition
+#SBATCH -n 4 # number of cores
+#SBATCH --mem-per-cpu=5GB
+#SBATCH -t 0-01:00 # time (D-HH:MM)
+#SBATCH -e compute_pi.err
+#SBATCH -o compute_pi.out
+
+export UCX_WARN_UNUSED_ENV_VARS=n
+module load gcc/12.2.0-fasrc01 openmpi/4.1.4-fasrc01
+
+julia --project=~/MPIenv -e 'using MPIPreferences; MPIPreferences.use_system_binary()'
+julia --project=~/MPIenv -e 'using Pkg; Pkg.build("MPI"; verbose=true)'
+mpiexec -n 4 julia --project=~/MPIenv compute_pi.jl
\ No newline at end of file
diff --git a/Parallel_Computing/Julia/README.md b/Parallel_Computing/Julia/README.md
new file mode 100644
index 00000000..68646fd0
--- /dev/null
+++ b/Parallel_Computing/Julia/README.md
@@ -0,0 +1,29 @@
+# MPI with Julia on FASRC
+
+### Installing Julia
+
+First, install Julia on the cluster using [juliaup](https://github.com/JuliaLang/juliaup):
+
+```bash
+curl -fsSL https://install.julialang.org | sh
+```
+
+### Creating an environment for MPI
+
+```bash
+cd ~
+mkdir MPIenv
+```
+
+Then start Julia and type `]` to enter package manager mode. Run
+
+```bash
+(@v1.11) pkg> activate ~/MPIenv
+(@v1.11) pkg> add MPI MPIPreferences
+```
+
+to install [MPI.jl](https://github.com/JuliaParallel/MPI.jl). After creating this environment, any parallel script can be run in this environment, for example:
+
+```bash
+mpiexec -n 4 julia --project=~/MPIenv script.jl
+```
\ No newline at end of file