NAS-122030 / 23.10 / Adds check to only render hostPath key if it has a value (#1210)

* Remove hostPath key from values.yaml

* Add migrations

* bump

* rename migration files and make it safer

* fix wrongly nested dict
This commit is contained in:
Stavros Kois
2023-05-30 16:23:29 +03:00
committed by GitHub
parent 99f57d4ab3
commit 85d2fab14d
63 changed files with 629 additions and 61 deletions

View File

@@ -4,7 +4,7 @@ description: Elasticsearch is the distributed, RESTful search and analytics engi
annotations:
title: Elastic Search
type: application
version: 1.0.7
version: 1.0.8
apiVersion: v2
appVersion: 8.8.0
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'esStorage'
    for name in ('data',):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -27,5 +27,4 @@ esNetwork:
esStorage:
data:
type: ixVolume
hostPath: ''
datasetName: data

View File

@@ -3,7 +3,7 @@ description: The Prometheus monitoring system and time series database.
annotations:
title: Prometheus
type: application
version: 1.0.7
version: 1.0.8
apiVersion: v2
appVersion: v2.44.0
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'prometheusStorage'
    for name in ('data', 'config'):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -26,9 +26,7 @@ prometheusRunAs:
prometheusStorage:
data:
type: ixVolume
hostPath: ''
datasetName: data
config:
type: ixVolume
hostPath: ''
datasetName: config

View File

@@ -3,7 +3,7 @@ description: Free and open source, powerful network-wide ads & trackers blocking
annotations:
title: AdGuard Home
type: application
version: 1.0.4
version: 1.0.5
apiVersion: v2
appVersion: 'v0.107.26'
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'adguardStorage'
    for name in ('work', 'conf'):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -20,9 +20,7 @@ adguardNetwork:
adguardStorage:
work:
type: ixVolume
hostPath: ""
datasetName: work
conf:
type: ixVolume
hostPath: ""
datasetName: conf

View File

@@ -4,7 +4,7 @@ description: Chia is a modern cryptocurrency built from scratch, designed to be
annotations:
title: Chia
type: application
version: 1.0.9
version: 1.0.10
apiVersion: v2
appVersion: 1.8.1
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'chiaStorage'
    for name in ('data', 'plots'):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -19,10 +19,8 @@ chiaConfig:
chiaStorage:
data:
type: ixVolume
hostPath: ''
datasetName: data
plots:
type: ixVolume
hostPath: /
datasetName: plots
additionalVolumes: []

View File

@@ -3,7 +3,7 @@ description: ClamAV is an open source (GPLv2) anti-virus toolkit.
annotations:
title: Clam AV
type: application
version: 1.0.1
version: 1.0.2
apiVersion: v2
appVersion: '1.0.1'
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'clamavStorage'
    for name in ('sigdb', 'scandir'):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -23,9 +23,7 @@ clamavNetwork:
clamavStorage:
sigdb:
type: ixVolume
hostPath: ''
datasetName: sig-db
scandir:
type: ixVolume
hostPath: ''
datasetName: scan-dir

View File

@@ -3,7 +3,7 @@ description: Lightweight universal DDNS Updater with web UI
annotations:
title: DDNS Updater
type: application
version: 1.0.2
version: 1.0.3
apiVersion: v2
appVersion: 'latest'
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'ddnsStorage'
    for name in ('data',):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -50,5 +50,4 @@ ddnsRunAs:
ddnsStorage:
data:
type: ixVolume
hostPath: ''
datasetName: data

View File

@@ -3,7 +3,7 @@ description: Gitea - Git with a cup of tea
annotations:
title: Gitea
type: application
version: 1.0.5
version: 1.0.6
apiVersion: v2
appVersion: '1.19.0'
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'giteaStorage'
    for name in ('data', 'config', 'pgData', 'pgBackup'):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -28,17 +28,13 @@ giteaRunAs:
giteaStorage:
data:
type: ixVolume
hostPath: ""
datasetName: data
config:
type: ixVolume
hostPath: ""
datasetName: config
pgData:
type: ixVolume
hostPath: ""
datasetName: pgData
pgBackup:
type: ixVolume
hostPath: ""
datasetName: pgBackup

View File

@@ -4,7 +4,7 @@ description: Interplanetary Filesystem - the Web3 standard for content-addressin
annotations:
title: IPFS
type: application
version: 1.0.10
version: 1.0.11
apiVersion: v2
appVersion: v0.20.0
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'ipfsStorage'
    for name in ('data', 'staging'):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -19,9 +19,7 @@ ipfsRunAs:
ipfsStorage:
data:
type: ixVolume
hostPath: ''
datasetName: data
staging:
type: ixVolume
hostPath: ''
datasetName: staging

View File

@@ -3,7 +3,7 @@ description: Jellyfin is a Free Software Media System that puts you in control o
annotations:
title: Jellyfin
type: application
version: 1.0.3
version: 1.0.4
apiVersion: v2
appVersion: '10.8.10'
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,28 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'jellyfinStorage'
    for name in ('config', 'cache', 'transcodes'):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -21,15 +21,12 @@ jellyfinRunAs:
jellyfinStorage:
config:
type: ixVolume
hostPath: ''
datasetName: config
cache:
type: ixVolume
hostPath: ''
datasetName: cache
transcodes:
type: ixVolume
hostPath: ''
datasetName: transcodes
medium: ''
size: ''

View File

@@ -3,7 +3,7 @@ description: Lidarr is a music collection manager for Usenet and BitTorrent user
annotations:
title: Lidarr
type: application
version: 1.0.7
version: 1.0.8
apiVersion: v2
appVersion: '1.1.3.2982'
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'lidarrStorage'
    for name in ('config',):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -20,6 +20,5 @@ lidarrRunAs:
lidarrStorage:
config:
type: ixVolume
hostPath: ''
datasetName: config
additionalStorages: []

View File

@@ -3,7 +3,7 @@ description: Minecraft is a sandbox game
annotations:
title: Minecraft
type: application
version: 1.0.1
version: 1.0.2
apiVersion: v2
appVersion: '2023.3.0'
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'mcStorage'
    for name in ('data',):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -100,6 +100,5 @@ mcNetwork:
mcStorage:
data:
type: ixVolume
hostPath: ''
datasetName: data
additionalStorages: []

View File

@@ -3,7 +3,7 @@ description: Mumble is a free, open source, low latency, high quality voice chat
annotations:
title: Mumble
type: application
version: 1.0.1
version: 1.0.2
apiVersion: v2
appVersion: 'v1.4.230'
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'mumbleStorage'
    for name in ('data',):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -25,5 +25,4 @@ mumbleNetwork:
mumbleStorage:
data:
type: ixVolume
hostPath: ''
datasetName: data

View File

@@ -3,7 +3,7 @@ description: Expose your services easily and securely
annotations:
title: Nginx Proxy Manager
type: application
version: 1.0.6
version: 1.0.7
apiVersion: v2
appVersion: 2.10.3
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'npmStorage'
    for name in ('data', 'certs'):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -15,11 +15,9 @@ npmNetwork:
npmStorage:
data:
type: ixVolume
hostPath: ''
datasetName: data
certs:
type: ixVolume
hostPath: ''
datasetName: certs
notes:
custom: |

View File

@@ -3,7 +3,7 @@ description: Overseerr is a free and open source software application for managi
annotations:
title: Overseerr
type: application
version: 1.0.2
version: 1.0.3
apiVersion: v2
appVersion: '1.32.5'
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'overseerrStorage'
    for name in ('config',):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -22,5 +22,4 @@ overseerrRunAs:
overseerrStorage:
config:
type: ixVolume
hostPath: ''
datasetName: config

View File

@@ -3,7 +3,7 @@ description: The qBittorrent project aims to provide an open-source software alt
annotations:
title: qBittorrent
type: application
version: 1.0.10
version: 1.0.11
apiVersion: v2
appVersion: '4.5.2'
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'qbitStorage'
    for name in ('downloads', 'config'):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -20,11 +20,9 @@ qbitRunAs:
qbitStorage:
downloads:
type: ixVolume
hostPath: ""
datasetName: downloads
config:
type: ixVolume
hostPath: ""
datasetName: config
notes:

View File

@@ -3,7 +3,7 @@ description: Radarr is a movie collection manager for Usenet and BitTorrent user
annotations:
title: Radarr
type: application
version: 1.0.10
version: 1.0.11
apiVersion: v2
appVersion: 4.5.2.7388
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'radarrStorage'
    for name in ('config',):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -20,6 +20,5 @@ radarrRunAs:
radarrStorage:
config:
type: ixVolume
hostPath: ''
datasetName: config
additionalStorages: []

View File

@@ -3,7 +3,7 @@ description: Sonarr is a PVR for Usenet and BitTorrent users.
annotations:
title: Sonarr
type: application
version: 1.0.7
version: 1.0.8
apiVersion: v2
appVersion: '3.0.10.1567'
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'sonarrStorage'
    for name in ('config',):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -20,6 +20,5 @@ sonarrRunAs:
sonarrStorage:
config:
type: ixVolume
hostPath: ''
datasetName: config
additionalStorages: []

View File

@@ -3,7 +3,7 @@ description: Tdarr is a Distributed Transcoding System
annotations:
title: Tdarr
type: application
version: 1.0.5
version: 1.0.6
apiVersion: v2
appVersion: '2.00.20.1'
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'tdarrStorage'
    for name in ('server', 'configs', 'logs', 'transcodes'):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -22,19 +22,15 @@ tdarrID:
tdarrStorage:
server:
type: ixVolume
hostPath: ''
datasetName: server
configs:
type: ixVolume
hostPath: ''
datasetName: configs
logs:
type: ixVolume
hostPath: ''
datasetName: logs
transcodes:
type: ixVolume
hostPath: ''
datasetName: transcodes
medium: ''
size: ''

View File

@@ -3,7 +3,7 @@ description: Terraria is a land of adventure! A land of mystery! A land that's y
annotations:
title: Terraria
type: application
version: 1.0.1
version: 1.0.2
apiVersion: v2
appVersion: '1.4.4.9'
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys


def migrate(values):
    """Strip the obsolete 'hostPath' key from non-hostPath storage entries."""
    storage_key = 'terrariaStorage'
    for name in ('world', 'plugins'):
        entry = values.get(storage_key, {}).get(name, {})
        # Only touch populated dicts whose type is explicitly not hostPath;
        # a missing 'type' is treated as hostPath and left alone.
        if isinstance(entry, dict) and entry and entry.get('type', 'hostPath') != 'hostPath':
            cleaned = dict(entry)
            cleaned.pop('hostPath', None)
            values[storage_key][name] = cleaned
    return values


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit(1)
    config_path = sys.argv[1]
    if os.path.exists(config_path):
        with open(config_path) as fh:
            print(json.dumps(migrate(json.load(fh))))

View File

@@ -36,9 +36,7 @@ terrariaNetwork:
terrariaStorage:
world:
type: ixVolume
hostPath: ''
datasetName: world
plugins:
type: ixVolume
hostPath: ''
datasetName: plugins

View File

@@ -4,7 +4,7 @@ description: Alternative implementation of the Bitwarden server API written in R
annotations:
title: Vaultwarden
type: application
version: 1.0.9
version: 1.0.10
apiVersion: v2
appVersion: '1.28.1'
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys
def migrate(values):
storageKey = 'vaultwardenStorage'
storages = ['data', 'pgData', 'pgData']
for storage in storages:
check_val = values.get(storageKey, {}).get(storage, {})
if not isinstance(check_val, dict) or not check_val or check_val.get('type', 'hostPath') == 'hostPath':
continue
values[storageKey][storage] = {key: value for key, value in check_val.items() if key != 'hostPath'}
return values
if __name__ == '__main__':
if len(sys.argv) != 2:
exit(1)
if os.path.exists(sys.argv[1]):
with open(sys.argv[1], 'r') as f:
print(json.dumps(migrate(json.loads(f.read()))))

View File

@@ -27,13 +27,10 @@ vaultwardenRunAs:
vaultwardenStorage:
data:
type: ixVolume
hostPath: ''
datasetName: data
pgData:
type: ixVolume
hostPath: ''
datasetName: pgData
pgBackup:
type: ixVolume
hostPath: ''
datasetName: pgBackup

View File

@@ -3,7 +3,7 @@ description: High Performance, Kubernetes Native Object Storage
annotations:
title: MinIO
type: application
version: 1.0.9
version: 1.0.10
apiVersion: v2
appVersion: '2023-03-24'
kubeVersion: '>=1.16.0-0'

View File

@@ -0,0 +1,29 @@
#!/usr/bin/python3
import json
import os
import sys
def migrate(values):
storageKey = 'minioLogging'
storageSubKey = 'logsearch'
storages = ['pgData', 'pgData']
for storage in storages:
check_val = values.get(storageKey, {}).get(storageSubKey, {}).get(storage, {})
if not isinstance(check_val, dict) or not check_val or check_val.get('type', 'hostPath') == 'hostPath':
continue
values[storageKey][storageSubKey][storage] = {key: value for key, value in check_val.items() if key != 'hostPath'}
return values
if __name__ == '__main__':
if len(sys.argv) != 2:
exit(1)
if os.path.exists(sys.argv[1]):
with open(sys.argv[1], 'r') as f:
print(json.dumps(migrate(json.loads(f.read()))))

View File

@@ -42,9 +42,7 @@ minioLogging:
diskCapacityGB: 5
pgData:
type: ixVolume
hostPath: ''
datasetName: postgres-data
pgBackup:
type: ixVolume
hostPath: ''
datasetName: postgres-backup