Shell Backup Tool

A self-written shell script backup tool.
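
Everything runs from a single backup root on an NFS mount. Pieced together from the scripts below, the working directory looks roughly like this (a sketch listing only the names that actually appear in this post):

```sh
# /nfs_hy_backup/backup/       backup root ($ROOT in the main script)
# ├── backup.lst               main backup list, one source per line
# ├── mysql_cnf/               one mysql-client config per instance (jx.lst, sit.lst, ...)
# ├── ansible_host.lst         ansible inventory for the nginx config pull
# └── nacos_backup.py          nacos config exporter, called by the main script
```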

Configuration files

## Main config file
cat backup.lst
#1type|2name|3IP|4port|5user|6password|7cycle|
pg|gis_sit|172.31.34.25|5083|postgres|abc123!!!|days|
pg|gis_prod|172.31.56.16|5083|postgres|abc123!!!|days|
pg|gis_jx|172.31.71.117|5083|postgres|abc123!!!|days|
pg|gis_old_lite|172.31.45.42|5083|postgres|abc123!!!|days|
pg|gis_old|172.31.45.42|5083|postgres|abc123!!!|weeks|
mysql|jx|3|4|5|6|days|
mysql|sit|3|4|5|6|days|
mysql|show|3|4|5|6|days|
mysql|prod|3|4|5|6|days|
mysql|wiki|3|4|5|6|days|
mysql|bigdata|3|4|5|6|days|

## mysql-client config file
cat ./mysql_cnf/jx.lst
[client]
user=root
password=abc123!!!
host=172.31.71.117
port=3306
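
mysqldump reads these per-instance files via --defaults-extra-file, so no passwords end up on the command line. A config can be verified ahead of the nightly run with a one-liner (a sketch; assumes the mysql client is installed on the backup host):

```sh
mysql --defaults-extra-file=./mysql_cnf/jx.lst -e 'SELECT VERSION();'
```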

## ansible config
cat ansible_host.lst
[nginx_default]
aws-prod-app-01
aws-prod-app-02
aws-prod-app-03
aws-prod-app-04
aws-prod-app-05
aws-sit-app-01
aws-sit-app-02
aws-sit-app-04
aws-show-app-01

[nginx_data]
aws-jx-app-01
aws-jx-app-02

[nginx_default:vars]
ansible_user=ops_root
ansible_password=dNqzrKHu8UQsuxcX

[nginx_data:vars]
ansible_user=ops_root
ansible_password=dNqzrKHu8UQsuxcX
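
These groups feed the nginx config pull in the main script. Before relying on the nightly run, the inventory can be smoke-tested with an ad-hoc ping (a sketch; assumes ansible is installed on the backup host):

```sh
ansible nginx_data -i ansible_host.lst -m ping
ansible nginx_default -i ansible_host.lst -m ping
```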

Main script

#!/bin/bash
DATETIME=$(date +"%Y%m%d%H%M")
ROOT=/nfs_hy_backup/backup
v_backuplist=$ROOT/backup.lst
v_weekday=$(date +%w)
v_day=$(date +%d)

cd $ROOT || exit 1
echo "***********************************************************************"
echo $(date) " -- backup begin"
echo "***********************************************************************"

## backup
for row in $(cat $v_backuplist | grep -v ^$ | grep -v ^#)
do
    # example row: pg|gis-sit|172.31.34.25|5083|postgres|Hxkj2022!!!|
    # field 7 (cycle) is informational only; weekly handling is keyed off the name below
    v_type=$(echo $row | awk -F\| '{print $1}')
    v_name=$(echo $row | awk -F\| '{print $2}')
    v_ip=$(echo $row | awk -F\| '{print $3}')
    v_port=$(echo $row | awk -F\| '{print $4}')
    v_user=$(echo $row | awk -F\| '{print $5}')
    v_pass=$(echo $row | awk -F\| '{print $6}')

    case $v_type in
    pg)
        export PGPASSWORD=$v_pass
        case $v_name in
        gis_old_lite)
            # two-part dump: everything except "common", then "common" minus the offline_map schema
            v_backupfile=${v_type}_${v_name}_${DATETIME}.tar
            pg_dumpall -h $v_ip -U $v_user -p $v_port --exclude-database=common -c --if-exists | gzip > ${v_type}_${v_name}_${DATETIME}.1.sql.gz
            pg_dump -h $v_ip -U $v_user -p $v_port --dbname=common --exclude-schema=offline_map -c --if-exists | gzip > ${v_type}_${v_name}_${DATETIME}.2.sql.gz
            tar -cvf $v_backupfile ${v_type}_${v_name}_${DATETIME}.*.sql.gz --remove-files
            # /usr/local/bin/aws s3 cp $v_backupfile s3://hxs3/backup/
            # if [[ "v${v_day}" == "v01" ]] || [[ "v${v_day}" == "v15" ]]; then /usr/local/bin/aws s3 cp $v_backupfile s3://hxarchive/永久备份/; fi
            ;;
        gis_old)
            # weekly: only runs on Saturday (date +%w == 6)
            if [[ "v${v_weekday}" == "v6" ]]
            then
                v_backupfile=${v_type}_${v_name}_${DATETIME}.sql.gz
                pg_dumpall -h $v_ip -U $v_user -p $v_port -c --if-exists | gzip > $v_backupfile
                # /usr/local/bin/aws s3 cp $v_backupfile s3://hxs3/backup/
            fi
            ;;
        *)
            v_backupfile=${v_type}_${v_name}_${DATETIME}.sql.gz
            pg_dumpall -h $v_ip -U $v_user -p $v_port -c --if-exists | gzip > $v_backupfile
            # /usr/local/bin/aws s3 cp $v_backupfile s3://hxs3/backup/
            # if [[ "v${v_day}" == "v01" ]] || [[ "v${v_day}" == "v15" ]]; then /usr/local/bin/aws s3 cp $v_backupfile s3://hxarchive/永久备份/; fi
            ;;
        esac
        ;;
    mysql)
        v_mysqllst=./mysql_cnf/${v_name}.lst
        v_backupfile=${v_type}_${v_name}_${DATETIME}.sql.gz
        mysql_ops=''    # reset so the bigdata-only flag cannot leak into later iterations
        if [[ "v${v_name}" == "vbigdata" ]]; then mysql_ops=' --skip-column-statistics' ; fi
        mysqldump --defaults-extra-file=$v_mysqllst --all-databases --set-gtid-purged=OFF $mysql_ops -C | gzip > $v_backupfile
        # /usr/local/bin/aws s3 cp $v_backupfile s3://hxs3/backup/
        # if [[ "v${v_day}" == "v01" ]] || [[ "v${v_day}" == "v15" ]]; then /usr/local/bin/aws s3 cp $v_backupfile s3://hxarchive/永久备份/; fi
        ;;
    *)
        ;;
    esac
done

## nacos backup
./nacos_backup.py

## nginx backup
# pull nginx conf.d from each host, then roll the per-host dirs into one tarball
ansible nginx_data -i ansible_host.lst -m synchronize -a "src=/data/nginx/conf/conf.d dest=nginx_{{inventory_hostname}} mode=pull"
tar -czvf nginx_${DATETIME}.tar.gz nginx_aws-* --remove-files
#/usr/local/bin/aws s3 cp nginx_${DATETIME}.tar.gz s3://hxs3/backup/

## cleanup
find $ROOT -mtime +10 -type f -name "*.gz" | xargs rm -fv
find $ROOT -mtime +10 -type f -name "*.zip" | xargs rm -fv
find $ROOT -mtime +10 -type f -name "*.tgz" | xargs rm -fv
find $ROOT -mtime +10 -type f -name "*.tar" | xargs rm -fv

find /nfs_hy_backup/gitlab_hy/ -name "*_gitlab_backup.tar" -mtime +30 | xargs rm -fv
find /nfs_hy_backup/gitlab_hx/ -name "*_gitlab_backup.tar" -mtime +30 | xargs rm -fv
find /nfs_hy_backup/gitlab_aws/ -name "*_gitlab_backup.tar" -mtime +30 | xargs rm -fv

## sync svn
rsync -avzut -P --progress --delete root@10.77.114.99:/date/svn/ /nfs_hy_backup/svn_rsync/

## sync hx-confluence
rsync -avzut -P --progress --delete root@10.77.114.101:/var/atlassian/ /nfs_hy_backup/confluence_rsync/confluence-data/
rsync -avzut -P --progress --delete root@10.77.114.101:/opt/atlassian/confluence /nfs_hy_backup/confluence_rsync/confluence/

## sync hx-jira
rsync -avzut -P --progress --delete root@10.77.114.102:/var/atlassian/ /nfs_hy_backup/jira_rsync/jira-data/
rsync -avzut -P --progress --delete root@10.77.114.102:/opt/atlassian/jira /nfs_hy_backup/jira_rsync/jira/

## sync jenkins
rsync -avzut -P --progress --delete root@hy-base-01:/data/jenkins/ /nfs_hy_backup/jenkins_sync/jenkins/ --exclude logs --exclude temp
rsync -avzut -P --progress --delete root@hy-base-01:/data/jenkins_data/ /nfs_hy_backup/jenkins_sync/jenkins_data/ --exclude workspace

## sync nexus
rsync -avzut -P --progress --delete root@hy-base-01:/data/nexus/ /nfs_hy_backup/nexus_sync/nexus/ --exclude sonatype-work/nexus3/log

## sync aws confluence,jira
rsync -avzut -P --progress --delete root@hy-base-03:/data/atlassian/ /nfs_hy_backup/wiki_jira_rsync/atlassian/ --exclude confluence/logs --exclude confluence-data/logs --exclude jira/logs --exclude jira-data/log



echo "***********************************************************************"
echo $(date) " -- backup end"
echo "***********************************************************************"

nacos backup script

#!/data/anaconda3/bin/python3.9
import requests
import json
import datetime
import os
from boto3.session import Session

## s3 config
aws_key = "AKIAUPGCAPWX4FD5N2CO"
aws_secret = "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"
region_name = "cn-northwest-1"
session = Session(aws_access_key_id=aws_key,
                  aws_secret_access_key=aws_secret,
                  region_name=region_name)

s3 = session.resource("s3")
client = session.client("s3")
bucket = "hxs3"

## nacos
# each entry: url|namespace|username|password|backup-name
nacos_list = [
    # 'http://10.9.127.244:8848|c23a1b4c-e97f-45ae-a5c0-1e7f1a8d22e9|nacos|nacos|nacos-dev',
    'http://nacos.farmbgy.net:8848|da7aaaa2-7505-4c1d-98c6-9e2e7778ef55|nacos|abc123!!!|nacos-prod',
    'http://aws-sit-code-pro-01:8848|09a89c03-8f14-40a7-a44b-2be7e9f87e14|nacos|abc123!!!|nacos-sit',
    'http://aws-jx-middle-01:8848|c876967c-d8dd-4318-9bf1-e7e72eb47d13|nacos|abc123!!!|nacos-jx',
    'http://aws-show-middle-01:8848|da7aaaa2-7505-4c1d-98c6-9e2e7778ef55|nacos|abc123!!!|nacos-show'
]

for row in nacos_list:
    url, namespace, username, password, filename = row.strip().split('|')

    # log in to nacos and grab an access token
    login_url = url + '/nacos/v1/auth/users/login'
    data = {'username': username,
            'password': password}
    s = requests.session()
    resp = s.post(login_url, data)
    token = json.loads(resp.text)['accessToken']

    # export every config in the namespace as a zip
    backup_url = url + '/nacos/v1/cs/configs?export=true&tenant=' + namespace + '&group=&appName=&dataId=&ids=&accessToken=' + token

    filename = os.path.abspath(os.path.dirname(__file__)) + '/' + filename + '-' + datetime.datetime.now().strftime('%Y%m%d') + '.zip'

    r = requests.get(backup_url)
    with open(filename, "wb") as code:
        code.write(r.content)

    # upload the zip to s3
    with open(filename, "rb") as upload_data:
        upload_key = "backup/" + os.path.basename(filename)
        file_obj = s3.Bucket(bucket).put_object(Key=upload_key, Body=upload_data)
    print(file_obj)
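
Nothing here daemonizes, so the whole thing is meant to be driven by cron. A minimal crontab sketch, assuming the main script above is saved as /nfs_hy_backup/backup/backup.sh (that filename is an assumption, the post never names it):

```sh
# nightly full run at 02:00; script name and log path are assumed
0 2 * * * /nfs_hy_backup/backup/backup.sh >> /nfs_hy_backup/backup/backup.log 2>&1
```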