
Commit 36f174d

Merge pull request #1 from DNXLabs/parallel-python
add parallel scripts
2 parents c4a86b3 + 99dc6a3 commit 36f174d

File tree

3 files changed: +105 -0 lines changed

- scripts/parallel-script/README.md
- scripts/parallel-script/parallel_db_restore.py
- scripts/parallel-script/parallel_sync.py


scripts/parallel-script/README.md

Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
# Parallel sync/restore

These scripts help you start multi-process runs of `aws s3 cp` copies and `mysql` restores. Both scripts can be adapted to parallelize as many other commands as you need.

## Describing functions - parallel_db_restore.py

- restore()
  - The function responsible for running the `mysql` restore command.
  - It uses the `subprocess` library to run the command in a shell.
  - Requires: a list of databases

- list_dbs()
  - Lists the `*.sql` files in a directory.
  - It grabs only the database name from each dump file name.
  - Requires:
    - Dumps created with something like: `mysql -N -e 'show databases' | while read dbname; do mysqldump --complete-insert --routines --triggers --single-transaction "$dbname" > /home/restore/"$dbname".sql; done`

- main()
  - pool = the number of worker processes running at the same time (see the sizing sketch after this list)
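Note that `multiprocessing.Pool` starts separate worker processes, not threads, so the pool size is a trade-off between host CPUs and how many concurrent connections the database can handle. As an optional, illustrative tweak (not part of this commit), the pool could be sized from the CPU count instead of a hard-coded number:

```python
# Illustrative sketch: derive the pool size from the host instead of hard-coding it.
from multiprocessing import Pool, cpu_count

def work(item):
    # stand-in for restore(); each worker would run one external command
    print('processing %s' % item)

if __name__ == '__main__':
    items = ['db1', 'db2', 'db3']                # would come from list_dbs()
    workers = min(len(items), cpu_count() * 2)   # I/O-bound work tolerates more workers than CPUs
    with Pool(workers) as pool:
        pool.map(work, items)
```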
## Describing functions - parallel_sync.py

- sync()
  - The function responsible for running the `aws s3 cp` copy command.
  - It uses the `subprocess` library to run the command in a shell.
  - Requires: a list of files (or folders) to sync

- list_files()
  - Create a file called `my_files_list.txt` with the list of files/folders, one per line (see the sketch after this list for one way to generate it):

    ```
    file1
    file2
    file3
    ```

- main()
  - pool = the number of worker processes running at the same time
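One possible way to build `my_files_list.txt` (an assumption, not part of this commit) is to capture an `aws s3 ls --recursive` listing and keep only the key column; the bucket name and the CLI's `date time size key` output layout are assumed here:

```python
# Sketch: write my_files_list.txt from an S3 listing (assumes the aws CLI is installed and configured).
import subprocess

def write_file_list(bucket='s3://my-origin-bucket', out_path='my_files_list.txt'):
    listing = subprocess.check_output(['aws', 's3', 'ls', bucket + '/', '--recursive'])
    with open(out_path, 'w') as out:
        for line in listing.decode().splitlines():
            parts = line.split(None, 3)     # expected columns: date, time, size, key
            if len(parts) == 4:
                out.write(parts[3] + '\n')  # keep only the object key

if __name__ == '__main__':
    write_file_list()
```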
## Changing the scripts

The scripts in this repo are examples. Feel free to change them to run your own command in parallel. When changing the scripts, don't forget to also change the listing function so that it produces whatever input your command needs.
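As a hypothetical example of such a change (the command and paths below are made up for illustration), the same `Pool.map` skeleton can compress a directory of files in parallel:

```python
# Hypothetical adaptation: gzip files in parallel instead of copying or restoring them.
import glob
import subprocess
from multiprocessing import Pool

def compress(path):
    subprocess.call(['gzip', '-k', path])      # -k keeps the original file next to the .gz

def list_targets():
    return glob.glob('/var/log/myapp/*.log')   # made-up path; point this at your own files

if __name__ == '__main__':
    pool = Pool(10)
    pool.map(compress, list_targets())
```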
scripts/parallel-script/parallel_db_restore.py

Lines changed: 33 additions & 0 deletions

@@ -0,0 +1,33 @@
```python
import subprocess, time, sys
import glob
from multiprocessing import Pool

users = []

def restore(user):
    my_host_dest = "my_database.ap-southeast-2.rds.amazonaws.com"
    db_admin = "admin"
    db_user = "%s" % user
    db_pass = " "  # fill in the admin password here
    # Pipe this database's dump file into mysql on the destination host.
    args = "/usr/bin/mysql -h %s -u %s -p%s < /home/restore/%s.sql" % (my_host_dest, db_admin, db_pass, db_user)
    try:
        subprocess.call(args, stderr=subprocess.PIPE, shell=True)
    except Exception as error:
        print(error)
        sys.exit(1)

def list_dbs():
    # Collect one database name per dump file found in /home/restore.
    dbs = glob.glob("/home/restore/*.sql")
    for d in dbs:
        # Assumes the dump path is exactly four directories deep and the file name
        # is underscore-separated with the database name in the third field.
        db = d.split('.')[0].split('/')[4].split('_')[2]
        users.append(db)
    return users

def main():
    users = list_dbs()
    start = time.time()
    pool = Pool(20)  # 20 worker processes restoring databases at the same time
    pool.map(restore, users)
    print('Restore finished in %s seconds' % str(time.time() - start))

if __name__ == '__main__':
    main()
```
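The chained splits in `list_dbs()` assume the dumps sit exactly four directories deep and use an underscore-separated file name with the database in the third field, while the `mysqldump` loop in the README writes plain `dbname.sql` files. A more defensive, path-independent variant (an illustrative alternative, not part of this commit) could be:

```python
# Sketch: derive the database name from the file name alone, assuming files are named <dbname>.sql
import glob
import os

def list_dbs():
    users = []
    for path in glob.glob('/home/restore/*.sql'):
        users.append(os.path.splitext(os.path.basename(path))[0])
    return users
```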
scripts/parallel-script/parallel_sync.py

Lines changed: 31 additions & 0 deletions

@@ -0,0 +1,31 @@
```python
import subprocess, time, sys
from multiprocessing import Pool

files = []

def sync(file_name):
    orig_bucket = "s3://my-origin-bucket"       # origin bucket
    dest_bucket = "s3://my-destination-bucket"  # destination bucket
    # Copy one file/folder and append the CLI output to a local progress log.
    args = "aws s3 cp %s/%s %s/%s >> ./files_finished.txt" % (orig_bucket, file_name, dest_bucket, file_name)
    try:
        subprocess.call(args, stderr=subprocess.PIPE, shell=True)
    except Exception as error:
        print(error)
        sys.exit(1)

def list_files():
    # Read one file/folder name per line from my_files_list.txt.
    with open("my_files_list.txt", "r") as file_list:
        for f in file_list:
            files.append(f.strip())
    return files

def main():
    files = list_files()
    start = time.time()
    pool = Pool(15)  # 15 worker processes copying objects at the same time
    pool.map(sync, files)
    print('Sync finished in %s seconds' % str(time.time() - start))

if __name__ == '__main__':
    main()
```
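Because each worker only waits on an external `aws` process, threads are usually enough for this kind of fan-out; a lighter-weight variant of `main()` (a sketch assuming `multiprocessing.pool.ThreadPool`, not part of the original script) could look like:

```python
# Sketch: same fan-out with threads instead of processes; the workers just block on subprocesses.
from multiprocessing.pool import ThreadPool

def run_parallel(worker, items, size=15):
    pool = ThreadPool(size)
    pool.map(worker, items)
    pool.close()
    pool.join()

# Usage (assuming sync() and list_files() from parallel_sync.py are in scope):
# run_parallel(sync, list_files())
```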
