apache AirFlow的Helm存储库:https://github.com/apache/airflow/tree/master/chart
AKS版本:1.16.13
from airflow import DAG
from datetime import datetime, timedelta
from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
from airflow.operators.dummy_operator import DummyOperator
default_args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime.utcnow(),
'email': ['airflow@example.com'],
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=5)
}
dag = DAG(
'kubernetes_sample', default_args=default_args, schedule_interval=timedelta(minutes=10))
start = DummyOperator(task_id='run_this_first', dag=dag)
passing = KubernetesPodOperator(namespace='default',
image="python:3.8-slim-buster",
cmds=["python3","-c"],
arguments=["print('hello world')"],
labels={"foo": "bar"},
name="passing-test",
task_id="passing-task",
get_logs=True,
dag=dag
)
passing.set_upstream(start)
kubectl create secret docker-registry testquay \
--docker-server=quay.io \
--docker-username=<Profile name> \
--docker-password=<password>
from airflow import DAG
from datetime import datetime, timedelta
from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
from airflow.operators.dummy_operator import DummyOperator
from airflow.contrib.kubernetes import secret
import logging
import os
import sys
import traceback
try:
env_var_secret = secret.Secret(
deploy_type='env',
deploy_target='VERSION_NUMBER',
secret='myregistrykey',
key='VERSION_NUMBER',
)
default_args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime.utcnow(),
'email': ['airflow@example.com'],
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=5)
}
dag = DAG(
'jordi_test_get_secert2', default_args=default_args, schedule_interval=timedelta(minutes=10))
start = DummyOperator(task_id='run_this_first', dag=dag)
quay_k8s = KubernetesPodOperator(
namespace='default',
name="passing-test7",
image='docker.io/test-pai-1',
image_pull_secrets=env_var_secret,
task_id="passing-task6",
get_logs=True,
dag=dag
)
start >> quay_k8s
except Exception as e:
error_message = {
"message": "An internal error ocurred"
,"error": str(e)
, "error information" : str(sys.exc_info())
, "traceback": str(traceback.format_exc())
}
logging.info(error_message)
File "/home/airflow/.local/lib/python3.6/site-packages/airflow/kubernetes/pod_generator.py", line 272, in __init__
for image_pull_secret in image_pull_secrets.split(','):
AttributeError: 'Secret' object has no attribute 'split'
from airflow import DAG
from datetime import datetime, timedelta
from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
from airflow.operators.dummy_operator import DummyOperator
from kubernetes.client import models as k8s
import logging
import os
import sys
import traceback
try:
default_args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime.utcnow(),
'email': ['airflow@example.com'],
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=5)
}
dag = DAG(
'jordi_test2', default_args=default_args, schedule_interval=timedelta(minutes=10))
start = DummyOperator(task_id='run_this_first', dag=dag)
quay_k8s = KubernetesPodOperator(
namespace='default',
name="passing-test7",
image='docker.io/test-pai-1',
image_pull_secrets=[k8s.V1LocalObjectReference('myregistrykey')],
task_id="passing-task6",
get_logs=True,
dag=dag
)
start >> quay_k8s
except Exception as e:
error_message = {
"message": "An internal error ocurred"
,"error": str(e)
, "error information" : str(sys.exc_info())
, "traceback": str(traceback.format_exc())
}
logging.info(error_message)
但请告诉我这个错误:
for image_pull_secret in image_pull_secrets.split(','):
AttributeError: 'list' object has no attribute 'split'
如果查看Airflow文档中的KubernetesPodOperator:https://Airflow.apache.org/docs/stable/_api/Airflow/contrib/operators/Kubernetes_pod_operator/index.html
image_pull_secrets (str) – Any image pull secrets to be given to the pod. If more than one secret is required, provide a comma separated list: secret_a,secret_b
正确的写法是怎样的?
会要求澄清作为评论,但我没有这个名声。因此提供了一个带有一些假设的解决方案。
kubectl create secret docker-registry testquay \
--docker-server=quay.io \
--docker-username=<Profile name> \
--docker-password=<password>
image_pull_secrets=[k8s.V1LocalObjectReference('myregistrykey')],
image_pull_secrets=[k8s.V1LocalObjectReference('testquay')],
我们已尝试设置hivemq清单文件。我们的私有存储库中有hivemq docker映像 Step1:我已经登录到私有存储库,就像 APIPversion:extensions/v1beta1种类:部署元数据:名称:hivemq spec:replicas:1模板:元数据:标签:名称:hivemq1 spec:containers:-env:xxxxx我传递的一些环境值名称:hivemq image
我是一个新的工作与气流和库伯内特。我正在尝试使用库伯内特斯的阿帕奇气流。 为了部署它,我使用了以下图表:https://github.com/apache/airflow/tree/master/chart。 但似乎不起作用。
因此,我在gitlab上有一个项目test-project(https://gitlab.com/group/test-project),这是一个我使用gitlab管道构建和部署的java应用程序。在构建阶段,我正在执行,包test-project.jar被推送到gitlab maven存储库(https://gitlab.com/group/test-project/-/packages)。 现
我已经构建了Flink的快照版本,并希望将其部署到我的私有nexus存储库中。我尝试在pom中添加以下配置。xml 然后运行“mvn部署”命令。不幸的是,maven仍然试图将Flink的快照部署到官方存储库中: [错误]无法执行目标组织。阿帕奇。专家插件:maven部署插件:2.8.2:project上的部署(默认部署):未能部署工件:无法传输工件组织。阿帕奇。flink:force shadin
本文档介绍使用 docker-compose 部署 harbor 私有仓库的步骤,你也可以使用 docker 官方的 registry 镜像部署私有仓库(部署 Docker Registry)。 使用的变量 本文档用到的变量定义如下: $ export NODE_IP=10.64.3.7 # 当前部署 harbor 的节点 IP $ 下载文件 从 docker compose 发布页面下载最新的
null 我是不是漏了什么? 谢谢