#!/bin/bash
s3cmd="aws s3"
s3name="files.01coin.io"
s3bucket="s3://$s3name/"
s3https="https://$s3name/"
file="bootstrap.dat"
file_zip="$file.zip"
file_gz="$file.tar.gz"
file_sha256="sha256.txt"
file_md5="md5.txt"
header=$(cat header.md)
footer=$(cat footer.md)
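# header.md and footer.md wrap the regenerated README: each run rebuilds
# README.md as header + one links section per network + footer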
# takes the network name and the current block count as params
do_the_job() {
network=$1
blocks=$2
mkdir -p "bootstrap$network$blocks"
date=$(date -u)
date_fmt=$(date -u +%Y-%m-%d)
s3networkPath="$s3bucket$network/"
s3currentPath="$s3networkPath$date_fmt/"
s3currentUrl="$s3https$network/$date_fmt/"
linksFile="links-$network.md"
# keep only the most recent entries (head defaults to 10 lines); tolerate a missing file on the first run
prevLinks=$([ -f "$linksFile" ] && head "$linksFile")
echo "$network job - Starting..."
# process blockchain
./linearize-hashes.py linearize-$network.cfg > hashlist.txt
./linearize-data.py linearize-$network.cfg
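# the linearize-* helpers (seemingly the stock ones from Bitcoin Core's
# contrib/linearize) dump the chain's block hashes and then rewrite the
# blocks in height order into a single bootstrap.dat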
# compress
zip "$file_zip" "$file"
# newer gzip releases treat the GZIP env var as obsolescent; pipe through gzip -9 explicitly
tar -cvf - "$file" | gzip -9 > "$file_gz"
# calculate checksums
sha256sum "$file" > "$file_sha256"
sha256sum "$file_zip" >> "$file_sha256"
sha256sum "$file_gz" >> "$file_sha256"
md5sum "$file" > "$file_md5"
md5sum "$file_zip" >> "$file_md5"
md5sum "$file_gz" >> "$file_md5"
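# consumers can verify a download with the companion checksum files, e.g.
#   sha256sum -c sha256.txt    # run in the directory holding the downloads
#   md5sum -c md5.txt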
# store: upload today's dated copies, then refresh the "latest" copies at the network root
$s3cmd cp "$file_zip" "$s3currentPath$file_zip" --acl public-read
$s3cmd cp "$file_gz" "$s3currentPath$file_gz" --acl public-read
$s3cmd cp "$file_sha256" "$s3currentPath$file_sha256" --acl public-read
$s3cmd cp "$file_md5" "$s3currentPath$file_md5" --acl public-read
$s3cmd cp "$s3currentPath$file_zip" "$s3networkPath$file_zip" --acl public-read
$s3cmd cp "$s3currentPath$file_gz" "$s3networkPath$file_gz" --acl public-read
$s3cmd cp "$s3currentPath$file_sha256" "$s3networkPath$file_sha256" --acl public-read
$s3cmd cp "$s3currentPath$file_md5" "$s3networkPath$file_md5" --acl public-read
# invalidate the cached "latest" copies (yourid is a placeholder for the real CloudFront distribution id)
aws cloudfront create-invalidation --distribution-id yourid --paths "/$network/$file_zip" "/$network/$file_gz" "/$network/$file_sha256" "/$network/$file_md5"
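# only the root "latest" paths need invalidating; the dated $date_fmt/ keys
# are brand-new objects that CloudFront has never cached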
# update docs
url_zip=$s3currentUrl$file_zip
url_gz=$s3currentUrl$file_gz
url_sha256=$s3currentUrl$file_sha256
url_md5=$s3currentUrl$file_md5
size_zip=$(ls -lh "$file_zip" | awk '{ print $5 }')
size_gz=$(ls -lh "$file_gz" | awk '{ print $5 }')
newLinks="Block $blocks: $date [zip]($url_zip) ($size_zip) [gz]($url_gz) ($size_gz) [SHA256]($url_sha256) [MD5]($url_md5)\n\n$prevLinks"
echo -e "$newLinks" > $linksFile
mv $file $file_zip $file_gz $file_sha256 $file_md5 hashlist.txt bootstrap$network$blocks/
echo -e "#### For $network:\n\n$newLinks\n\n" >> README.md
# clean up old files
keepDays=7
scanDays=30
oldFolders=$($s3cmd ls "$s3networkPath" | grep -oP '[0-9]{4}-[0-9]{2}-[0-9]{2}')
while [ $keepDays -lt $scanDays ]; do
loopDate=$(date -u -d "now -$keepDays days" +%Y-%m-%d)
found=$(echo "$oldFolders" | grep -o "$loopDate")
if [ "$found" != "" ]; then
echo "found old folder $found, deleting $s3networkPath$loopDate/ ..."
$s3cmd rm $s3networkPath$loopDate --recursive
fi
keepDays=$((keepDays + 1))
done
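# note: this sweep only covers folders between keepDays and scanDays days old;
# a folder that slips past the scanDays horizon is never revisited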
echo "$network job - Done!"
}
# fill the header
echo -e "$header\n" > README.md
# mainnet
#cat ~/.zeroonecore/blocks/blk0000* > $file
blocks=$(./zeroone-cli getblockcount)
do_the_job mainnet $blocks
# testnet
#cat ~/.zeroonetest/testnet3/blocks/blk0000* > $file
blocks=$(./zeroone-cli_testnet -datadir="$HOME/.zeroonetest" getblockcount)
do_the_job testnet $blocks
# finalize with the footer
echo -e "$footer" >> README.md
# push to github; $date still holds the timestamp from the last do_the_job run
# (variables assigned inside a bash function are global unless declared local)
git add *.md
git commit -m "$date - autoupdate"
git push