diff --git a/api/api-rules/violation_exceptions.list b/api/api-rules/violation_exceptions.list index d8f5be89e..fd4c03d2b 100644 --- a/api/api-rules/violation_exceptions.list +++ b/api/api-rules/violation_exceptions.list @@ -24,10 +24,11 @@ API rule violation: list_type_missing,k8s.io/apimachinery/pkg/apis/meta/v1,Table API rule violation: list_type_missing,k8s.io/apimachinery/pkg/apis/meta/v1,TableRow,Cells API rule violation: list_type_missing,k8s.io/apimachinery/pkg/apis/meta/v1,TableRow,Conditions API rule violation: list_type_missing,k8s.io/apimachinery/pkg/apis/meta/v1,UpdateOptions,DryRun -API rule violation: list_type_missing,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,ServicePolicyList,Items -API rule violation: list_type_missing,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,ServicePolicyStatus,Conditions -API rule violation: list_type_missing,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,StrategyList,Items -API rule violation: list_type_missing,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,StrategyStatus,Conditions +API rule violation: list_type_missing,k8s.io/apimachinery/pkg/runtime,RawExtension,Raw +API rule violation: list_type_missing,k8s.io/apimachinery/pkg/runtime,Unknown,Raw +API rule violation: list_type_missing,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,DevOpsProjectList,Items +API rule violation: list_type_missing,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,NoScmPipeline,Parameters +API rule violation: list_type_missing,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,PipelineList,Items API rule violation: names_match,k8s.io/apimachinery/pkg/apis/meta/v1,APIResourceList,APIResources API rule violation: names_match,k8s.io/apimachinery/pkg/apis/meta/v1,Duration,Duration API rule violation: names_match,k8s.io/apimachinery/pkg/apis/meta/v1,InternalEvent,Object @@ -35,24 +36,51 @@ API rule violation: names_match,k8s.io/apimachinery/pkg/apis/meta/v1,InternalEve API rule violation: 
names_match,k8s.io/apimachinery/pkg/apis/meta/v1,MicroTime,Time API rule violation: names_match,k8s.io/apimachinery/pkg/apis/meta/v1,StatusCause,Type API rule violation: names_match,k8s.io/apimachinery/pkg/apis/meta/v1,Time,Time -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,DestinationRuleSpecTemplate,ObjectMeta -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,ServicePolicyCondition,LastProbeTime -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,ServicePolicyCondition,LastTransitionTime -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,ServicePolicyCondition,Message -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,ServicePolicyCondition,Reason -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,ServicePolicyCondition,Status -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,ServicePolicyCondition,Type -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,ServicePolicyStatus,CompletionTime -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,ServicePolicyStatus,Conditions -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,ServicePolicyStatus,StartTime -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,StrategyCondition,LastProbeTime -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,StrategyCondition,LastTransitionTime -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,StrategyCondition,Message -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,StrategyCondition,Reason -API rule violation: 
names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,StrategyCondition,Status -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,StrategyCondition,Type -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,StrategySpec,GovernorVersion -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,StrategySpec,PrincipalVersion -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,StrategyStatus,CompletionTime -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,StrategyStatus,Conditions -API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/servicemesh/v1alpha2,StrategyStatus,StartTime +API rule violation: names_match,k8s.io/apimachinery/pkg/runtime,Unknown,ContentEncoding +API rule violation: names_match,k8s.io/apimachinery/pkg/runtime,Unknown,ContentType +API rule violation: names_match,k8s.io/apimachinery/pkg/runtime,Unknown,Raw +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,BitbucketServerSource,ApiUri +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,BitbucketServerSource,CloneOption +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,BitbucketServerSource,CredentialId +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,BitbucketServerSource,DiscoverBranches +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,BitbucketServerSource,DiscoverPRFromForks +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,BitbucketServerSource,DiscoverPRFromOrigin +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,BitbucketServerSource,RegexFilter +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,BitbucketServerSource,ScmId +API rule 
violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,DiscarderProperty,DaysToKeep +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,DiscarderProperty,NumToKeep +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,GitSource,CloneOption +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,GitSource,CredentialId +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,GitSource,DiscoverBranches +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,GitSource,RegexFilter +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,GitSource,ScmId +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,GithubSource,ApiUri +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,GithubSource,CloneOption +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,GithubSource,CredentialId +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,GithubSource,DiscoverBranches +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,GithubSource,DiscoverPRFromForks +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,GithubSource,DiscoverPRFromOrigin +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,GithubSource,RegexFilter +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,GithubSource,ScmId +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,MultiBranchJobTrigger,CreateActionJobsToTrigger +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,MultiBranchJobTrigger,DeleteActionJobsToTrigger +API rule violation: 
names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,MultiBranchPipeline,BitbucketServerSource +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,MultiBranchPipeline,Description +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,MultiBranchPipeline,GitHubSource +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,MultiBranchPipeline,GitSource +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,MultiBranchPipeline,MultiBranchJobTrigger +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,MultiBranchPipeline,ScriptPath +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,MultiBranchPipeline,SingleSvnSource +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,MultiBranchPipeline,SourceType +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,MultiBranchPipeline,SvnSource +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,MultiBranchPipeline,TimerTrigger +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,NoScmPipeline,Description +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,NoScmPipeline,DisableConcurrent +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,NoScmPipeline,RemoteTrigger +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,NoScmPipeline,TimerTrigger +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,Parameter,DefaultValue +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,PipelineSpec,MultiBranchPipeline +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,SingleSvnSource,CredentialId +API rule violation: 
names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,SingleSvnSource,ScmId +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,SvnSource,CredentialId +API rule violation: names_match,kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3,SvnSource,ScmId diff --git a/config/samples/devops_v1alpha3_pipeline.yaml b/config/samples/devops_v1alpha3_pipeline.yaml new file mode 100644 index 000000000..b9c9fbba3 --- /dev/null +++ b/config/samples/devops_v1alpha3_pipeline.yaml @@ -0,0 +1,9 @@ +apiVersion: devops.kubesphere.io/v1alpha3 +kind: Pipeline +metadata: + labels: + controller-tools.k8s.io: "1.0" + name: pipeline-sample +spec: + # Add fields here + foo: bar diff --git a/pkg/apis/devops/v1alpha3/openapi_generated.go b/pkg/apis/devops/v1alpha3/openapi_generated.go index c17f90d9d..e7189ecf9 100644 --- a/pkg/apis/devops/v1alpha3/openapi_generated.go +++ b/pkg/apis/devops/v1alpha3/openapi_generated.go @@ -30,60 +30,78 @@ import ( func GetOpenAPIDefinitions(ref common.ReferenceCallback) map[string]common.OpenAPIDefinition { return map[string]common.OpenAPIDefinition{ - "k8s.io/apimachinery/pkg/apis/meta/v1.APIGroup": schema_pkg_apis_meta_v1_APIGroup(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.APIGroupList": schema_pkg_apis_meta_v1_APIGroupList(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.APIResource": schema_pkg_apis_meta_v1_APIResource(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.APIResourceList": schema_pkg_apis_meta_v1_APIResourceList(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.APIVersions": schema_pkg_apis_meta_v1_APIVersions(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.CreateOptions": schema_pkg_apis_meta_v1_CreateOptions(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.DeleteOptions": schema_pkg_apis_meta_v1_DeleteOptions(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.Duration": schema_pkg_apis_meta_v1_Duration(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.ExportOptions": schema_pkg_apis_meta_v1_ExportOptions(ref), - 
"k8s.io/apimachinery/pkg/apis/meta/v1.FieldsV1": schema_pkg_apis_meta_v1_FieldsV1(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.GetOptions": schema_pkg_apis_meta_v1_GetOptions(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.GroupKind": schema_pkg_apis_meta_v1_GroupKind(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.GroupResource": schema_pkg_apis_meta_v1_GroupResource(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.GroupVersion": schema_pkg_apis_meta_v1_GroupVersion(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.GroupVersionForDiscovery": schema_pkg_apis_meta_v1_GroupVersionForDiscovery(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.GroupVersionKind": schema_pkg_apis_meta_v1_GroupVersionKind(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.GroupVersionResource": schema_pkg_apis_meta_v1_GroupVersionResource(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.InternalEvent": schema_pkg_apis_meta_v1_InternalEvent(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.LabelSelector": schema_pkg_apis_meta_v1_LabelSelector(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.LabelSelectorRequirement": schema_pkg_apis_meta_v1_LabelSelectorRequirement(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.List": schema_pkg_apis_meta_v1_List(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.ListMeta": schema_pkg_apis_meta_v1_ListMeta(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.ListOptions": schema_pkg_apis_meta_v1_ListOptions(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.ManagedFieldsEntry": schema_pkg_apis_meta_v1_ManagedFieldsEntry(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.MicroTime": schema_pkg_apis_meta_v1_MicroTime(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.ObjectMeta": schema_pkg_apis_meta_v1_ObjectMeta(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.OwnerReference": schema_pkg_apis_meta_v1_OwnerReference(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.PartialObjectMetadata": schema_pkg_apis_meta_v1_PartialObjectMetadata(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.PartialObjectMetadataList": 
schema_pkg_apis_meta_v1_PartialObjectMetadataList(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.Patch": schema_pkg_apis_meta_v1_Patch(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.PatchOptions": schema_pkg_apis_meta_v1_PatchOptions(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.Preconditions": schema_pkg_apis_meta_v1_Preconditions(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.RootPaths": schema_pkg_apis_meta_v1_RootPaths(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.ServerAddressByClientCIDR": schema_pkg_apis_meta_v1_ServerAddressByClientCIDR(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.Status": schema_pkg_apis_meta_v1_Status(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.StatusCause": schema_pkg_apis_meta_v1_StatusCause(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.StatusDetails": schema_pkg_apis_meta_v1_StatusDetails(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.Table": schema_pkg_apis_meta_v1_Table(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.TableColumnDefinition": schema_pkg_apis_meta_v1_TableColumnDefinition(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.TableOptions": schema_pkg_apis_meta_v1_TableOptions(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.TableRow": schema_pkg_apis_meta_v1_TableRow(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.TableRowCondition": schema_pkg_apis_meta_v1_TableRowCondition(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.Time": schema_pkg_apis_meta_v1_Time(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.Timestamp": schema_pkg_apis_meta_v1_Timestamp(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.TypeMeta": schema_pkg_apis_meta_v1_TypeMeta(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.UpdateOptions": schema_pkg_apis_meta_v1_UpdateOptions(ref), - "k8s.io/apimachinery/pkg/apis/meta/v1.WatchEvent": schema_pkg_apis_meta_v1_WatchEvent(ref), - "k8s.io/apimachinery/pkg/runtime.RawExtension": schema_k8sio_apimachinery_pkg_runtime_RawExtension(ref), - "k8s.io/apimachinery/pkg/runtime.TypeMeta": schema_k8sio_apimachinery_pkg_runtime_TypeMeta(ref), - 
"k8s.io/apimachinery/pkg/runtime.Unknown": schema_k8sio_apimachinery_pkg_runtime_Unknown(ref), - "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DevOpsProject": schema_pkg_apis_devops_v1alpha3_DevOpsProject(ref), - "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DevOpsProjectList": schema_pkg_apis_devops_v1alpha3_DevOpsProjectList(ref), - "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DevOpsProjectSpec": schema_pkg_apis_devops_v1alpha3_DevOpsProjectSpec(ref), - "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DevOpsProjectStatus": schema_pkg_apis_devops_v1alpha3_DevOpsProjectStatus(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.APIGroup": schema_pkg_apis_meta_v1_APIGroup(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.APIGroupList": schema_pkg_apis_meta_v1_APIGroupList(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.APIResource": schema_pkg_apis_meta_v1_APIResource(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.APIResourceList": schema_pkg_apis_meta_v1_APIResourceList(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.APIVersions": schema_pkg_apis_meta_v1_APIVersions(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.CreateOptions": schema_pkg_apis_meta_v1_CreateOptions(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.DeleteOptions": schema_pkg_apis_meta_v1_DeleteOptions(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.Duration": schema_pkg_apis_meta_v1_Duration(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.ExportOptions": schema_pkg_apis_meta_v1_ExportOptions(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.FieldsV1": schema_pkg_apis_meta_v1_FieldsV1(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.GetOptions": schema_pkg_apis_meta_v1_GetOptions(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.GroupKind": schema_pkg_apis_meta_v1_GroupKind(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.GroupResource": schema_pkg_apis_meta_v1_GroupResource(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.GroupVersion": schema_pkg_apis_meta_v1_GroupVersion(ref), + 
"k8s.io/apimachinery/pkg/apis/meta/v1.GroupVersionForDiscovery": schema_pkg_apis_meta_v1_GroupVersionForDiscovery(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.GroupVersionKind": schema_pkg_apis_meta_v1_GroupVersionKind(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.GroupVersionResource": schema_pkg_apis_meta_v1_GroupVersionResource(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.InternalEvent": schema_pkg_apis_meta_v1_InternalEvent(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.LabelSelector": schema_pkg_apis_meta_v1_LabelSelector(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.LabelSelectorRequirement": schema_pkg_apis_meta_v1_LabelSelectorRequirement(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.List": schema_pkg_apis_meta_v1_List(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.ListMeta": schema_pkg_apis_meta_v1_ListMeta(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.ListOptions": schema_pkg_apis_meta_v1_ListOptions(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.ManagedFieldsEntry": schema_pkg_apis_meta_v1_ManagedFieldsEntry(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.MicroTime": schema_pkg_apis_meta_v1_MicroTime(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.ObjectMeta": schema_pkg_apis_meta_v1_ObjectMeta(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.OwnerReference": schema_pkg_apis_meta_v1_OwnerReference(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.PartialObjectMetadata": schema_pkg_apis_meta_v1_PartialObjectMetadata(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.PartialObjectMetadataList": schema_pkg_apis_meta_v1_PartialObjectMetadataList(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.Patch": schema_pkg_apis_meta_v1_Patch(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.PatchOptions": schema_pkg_apis_meta_v1_PatchOptions(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.Preconditions": schema_pkg_apis_meta_v1_Preconditions(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.RootPaths": schema_pkg_apis_meta_v1_RootPaths(ref), + 
"k8s.io/apimachinery/pkg/apis/meta/v1.ServerAddressByClientCIDR": schema_pkg_apis_meta_v1_ServerAddressByClientCIDR(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.Status": schema_pkg_apis_meta_v1_Status(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.StatusCause": schema_pkg_apis_meta_v1_StatusCause(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.StatusDetails": schema_pkg_apis_meta_v1_StatusDetails(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.Table": schema_pkg_apis_meta_v1_Table(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.TableColumnDefinition": schema_pkg_apis_meta_v1_TableColumnDefinition(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.TableOptions": schema_pkg_apis_meta_v1_TableOptions(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.TableRow": schema_pkg_apis_meta_v1_TableRow(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.TableRowCondition": schema_pkg_apis_meta_v1_TableRowCondition(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.Time": schema_pkg_apis_meta_v1_Time(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.Timestamp": schema_pkg_apis_meta_v1_Timestamp(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.TypeMeta": schema_pkg_apis_meta_v1_TypeMeta(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.UpdateOptions": schema_pkg_apis_meta_v1_UpdateOptions(ref), + "k8s.io/apimachinery/pkg/apis/meta/v1.WatchEvent": schema_pkg_apis_meta_v1_WatchEvent(ref), + "k8s.io/apimachinery/pkg/runtime.RawExtension": schema_k8sio_apimachinery_pkg_runtime_RawExtension(ref), + "k8s.io/apimachinery/pkg/runtime.TypeMeta": schema_k8sio_apimachinery_pkg_runtime_TypeMeta(ref), + "k8s.io/apimachinery/pkg/runtime.Unknown": schema_k8sio_apimachinery_pkg_runtime_Unknown(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.BitbucketServerSource": schema_pkg_apis_devops_v1alpha3_BitbucketServerSource(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DevOpsProject": schema_pkg_apis_devops_v1alpha3_DevOpsProject(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DevOpsProjectList": 
schema_pkg_apis_devops_v1alpha3_DevOpsProjectList(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DevOpsProjectSpec": schema_pkg_apis_devops_v1alpha3_DevOpsProjectSpec(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DevOpsProjectStatus": schema_pkg_apis_devops_v1alpha3_DevOpsProjectStatus(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DiscarderProperty": schema_pkg_apis_devops_v1alpha3_DiscarderProperty(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DiscoverPRFromForks": schema_pkg_apis_devops_v1alpha3_DiscoverPRFromForks(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.GitCloneOption": schema_pkg_apis_devops_v1alpha3_GitCloneOption(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.GitSource": schema_pkg_apis_devops_v1alpha3_GitSource(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.GithubSource": schema_pkg_apis_devops_v1alpha3_GithubSource(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.MultiBranchJobTrigger": schema_pkg_apis_devops_v1alpha3_MultiBranchJobTrigger(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.MultiBranchPipeline": schema_pkg_apis_devops_v1alpha3_MultiBranchPipeline(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.NoScmPipeline": schema_pkg_apis_devops_v1alpha3_NoScmPipeline(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.Parameter": schema_pkg_apis_devops_v1alpha3_Parameter(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.Pipeline": schema_pkg_apis_devops_v1alpha3_Pipeline(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.PipelineList": schema_pkg_apis_devops_v1alpha3_PipelineList(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.PipelineSpec": schema_pkg_apis_devops_v1alpha3_PipelineSpec(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.PipelineStatus": schema_pkg_apis_devops_v1alpha3_PipelineStatus(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.RemoteTrigger": 
schema_pkg_apis_devops_v1alpha3_RemoteTrigger(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.SingleSvnSource": schema_pkg_apis_devops_v1alpha3_SingleSvnSource(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.SvnSource": schema_pkg_apis_devops_v1alpha3_SvnSource(ref), + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.TimerTrigger": schema_pkg_apis_devops_v1alpha3_TimerTrigger(ref), } } @@ -2245,6 +2263,78 @@ func schema_k8sio_apimachinery_pkg_runtime_Unknown(ref common.ReferenceCallback) } } +func schema_pkg_apis_devops_v1alpha3_BitbucketServerSource(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "scm_id": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "owner": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "repo": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "credential_id": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "api_uri": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "discover_branches": { + SchemaProps: spec.SchemaProps{ + Type: []string{"integer"}, + Format: "int32", + }, + }, + "discover_pr_from_origin": { + SchemaProps: spec.SchemaProps{ + Type: []string{"integer"}, + Format: "int32", + }, + }, + "discover_pr_from_forks": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DiscoverPRFromForks"), + }, + }, + "git_clone_option": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.GitCloneOption"), + }, + }, + "regex_filter": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + }, + }, + }, + Dependencies: []string{ + 
"kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DiscoverPRFromForks", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.GitCloneOption"}, + } +} + func schema_pkg_apis_devops_v1alpha3_DevOpsProject(ref common.ReferenceCallback) common.OpenAPIDefinition { return common.OpenAPIDefinition{ Schema: spec.Schema{ @@ -2366,3 +2456,659 @@ func schema_pkg_apis_devops_v1alpha3_DevOpsProjectStatus(ref common.ReferenceCal }, } } + +func schema_pkg_apis_devops_v1alpha3_DiscarderProperty(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "days_to_keep": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "num_to_keep": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + }, + }, + }, + } +} + +func schema_pkg_apis_devops_v1alpha3_DiscoverPRFromForks(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "strategy": { + SchemaProps: spec.SchemaProps{ + Type: []string{"integer"}, + Format: "int32", + }, + }, + "trust": { + SchemaProps: spec.SchemaProps{ + Type: []string{"integer"}, + Format: "int32", + }, + }, + }, + }, + }, + } +} + +func schema_pkg_apis_devops_v1alpha3_GitCloneOption(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "shallow": { + SchemaProps: spec.SchemaProps{ + Type: []string{"boolean"}, + Format: "", + }, + }, + "timeout": { + SchemaProps: spec.SchemaProps{ + Type: []string{"integer"}, + Format: "int32", + }, + }, + "depth": { + SchemaProps: spec.SchemaProps{ + Type: []string{"integer"}, + Format: 
"int32", + }, + }, + }, + }, + }, + } +} + +func schema_pkg_apis_devops_v1alpha3_GitSource(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "scm_id": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "url": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "credential_id": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "discover_branches": { + SchemaProps: spec.SchemaProps{ + Type: []string{"boolean"}, + Format: "", + }, + }, + "git_clone_option": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.GitCloneOption"), + }, + }, + "regex_filter": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + }, + }, + }, + Dependencies: []string{ + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.GitCloneOption"}, + } +} + +func schema_pkg_apis_devops_v1alpha3_GithubSource(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "scm_id": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "owner": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "repo": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "credential_id": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "api_uri": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "discover_branches": { + SchemaProps: spec.SchemaProps{ + Type: []string{"integer"}, + Format: "int32", + }, + }, + "discover_pr_from_origin": 
{ + SchemaProps: spec.SchemaProps{ + Type: []string{"integer"}, + Format: "int32", + }, + }, + "discover_pr_from_forks": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DiscoverPRFromForks"), + }, + }, + "git_clone_option": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.GitCloneOption"), + }, + }, + "regex_filter": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + }, + }, + }, + Dependencies: []string{ + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DiscoverPRFromForks", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.GitCloneOption"}, + } +} + +func schema_pkg_apis_devops_v1alpha3_MultiBranchJobTrigger(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "create_action_job_to_trigger": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "delete_action_job_to_trigger": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + }, + }, + }, + } +} + +func schema_pkg_apis_devops_v1alpha3_MultiBranchPipeline(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "name": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "descriptio": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "discarder": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DiscarderProperty"), + }, + }, + "timer_trigger": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.TimerTrigger"), + }, + }, + 
"source_type": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "git_source": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.GitSource"), + }, + }, + "github_source": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.GithubSource"), + }, + }, + "svn_source": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.SvnSource"), + }, + }, + "single_svn_source": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.SingleSvnSource"), + }, + }, + "bitbucket_server_source": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.BitbucketServerSource"), + }, + }, + "script_path": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "multibranch_job_trigger": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.MultiBranchJobTrigger"), + }, + }, + }, + Required: []string{"name", "source_type", "script_path"}, + }, + }, + Dependencies: []string{ + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.BitbucketServerSource", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DiscarderProperty", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.GitSource", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.GithubSource", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.MultiBranchJobTrigger", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.SingleSvnSource", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.SvnSource", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.TimerTrigger"}, + } +} + +func schema_pkg_apis_devops_v1alpha3_NoScmPipeline(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: 
map[string]spec.Schema{ + "name": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "descriptio": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "discarder": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DiscarderProperty"), + }, + }, + "parameters": { + SchemaProps: spec.SchemaProps{ + Type: []string{"array"}, + Items: &spec.SchemaOrArray{ + Schema: &spec.Schema{ + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.Parameter"), + }, + }, + }, + }, + }, + "disable_concurrent": { + SchemaProps: spec.SchemaProps{ + Type: []string{"boolean"}, + Format: "", + }, + }, + "timer_trigger": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.TimerTrigger"), + }, + }, + "remote_trigger": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.RemoteTrigger"), + }, + }, + "jenkinsfile": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + }, + Required: []string{"name"}, + }, + }, + Dependencies: []string{ + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.DiscarderProperty", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.Parameter", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.RemoteTrigger", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.TimerTrigger"}, + } +} + +func schema_pkg_apis_devops_v1alpha3_Parameter(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "name": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "default_value": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "type": { + SchemaProps: spec.SchemaProps{ + 
Type: []string{"string"}, + Format: "", + }, + }, + "description": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + }, + Required: []string{"name", "type"}, + }, + }, + } +} + +func schema_pkg_apis_devops_v1alpha3_Pipeline(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "Pipeline is the Schema for the pipelines API", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "kind": { + SchemaProps: spec.SchemaProps{ + Description: "Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds", + Type: []string{"string"}, + Format: "", + }, + }, + "apiVersion": { + SchemaProps: spec.SchemaProps{ + Description: "APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. 
More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources", + Type: []string{"string"}, + Format: "", + }, + }, + "metadata": { + SchemaProps: spec.SchemaProps{ + Ref: ref("k8s.io/apimachinery/pkg/apis/meta/v1.ObjectMeta"), + }, + }, + "spec": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.PipelineSpec"), + }, + }, + "status": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.PipelineStatus"), + }, + }, + }, + }, + }, + Dependencies: []string{ + "k8s.io/apimachinery/pkg/apis/meta/v1.ObjectMeta", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.PipelineSpec", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.PipelineStatus"}, + } +} + +func schema_pkg_apis_devops_v1alpha3_PipelineList(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "PipelineList contains a list of Pipeline", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "kind": { + SchemaProps: spec.SchemaProps{ + Description: "Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds", + Type: []string{"string"}, + Format: "", + }, + }, + "apiVersion": { + SchemaProps: spec.SchemaProps{ + Description: "APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. 
More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources", + Type: []string{"string"}, + Format: "", + }, + }, + "metadata": { + SchemaProps: spec.SchemaProps{ + Ref: ref("k8s.io/apimachinery/pkg/apis/meta/v1.ListMeta"), + }, + }, + "items": { + SchemaProps: spec.SchemaProps{ + Type: []string{"array"}, + Items: &spec.SchemaOrArray{ + Schema: &spec.Schema{ + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.Pipeline"), + }, + }, + }, + }, + }, + }, + Required: []string{"items"}, + }, + }, + Dependencies: []string{ + "k8s.io/apimachinery/pkg/apis/meta/v1.ListMeta", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.Pipeline"}, + } +} + +func schema_pkg_apis_devops_v1alpha3_PipelineSpec(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Description: "PipelineSpec defines the desired state of Pipeline", + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "type": { + SchemaProps: spec.SchemaProps{ + Description: "INSERT ADDITIONAL SPEC FIELDS - desired state of cluster Important: Run \"make\" to regenerate code after modifying this file", + Type: []string{"string"}, + Format: "", + }, + }, + "pipeline": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.NoScmPipeline"), + }, + }, + "multi_branch_pipeline": { + SchemaProps: spec.SchemaProps{ + Ref: ref("kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.MultiBranchPipeline"), + }, + }, + }, + Required: []string{"type"}, + }, + }, + Dependencies: []string{ + "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.MultiBranchPipeline", "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3.NoScmPipeline"}, + } +} + +func schema_pkg_apis_devops_v1alpha3_PipelineStatus(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + 
SchemaProps: spec.SchemaProps{ + Description: "PipelineStatus defines the observed state of Pipeline", + Type: []string{"object"}, + }, + }, + } +} + +func schema_pkg_apis_devops_v1alpha3_RemoteTrigger(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "token": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + }, + }, + }, + } +} + +func schema_pkg_apis_devops_v1alpha3_SingleSvnSource(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "scm_id": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "remote": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "credential_id": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + }, + }, + }, + } +} + +func schema_pkg_apis_devops_v1alpha3_SvnSource(ref common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "scm_id": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "remote": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "credential_id": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "includes": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + "excludes": { + SchemaProps: spec.SchemaProps{ + Type: []string{"string"}, + Format: "", + }, + }, + }, + }, + }, + } +} + +func schema_pkg_apis_devops_v1alpha3_TimerTrigger(ref 
common.ReferenceCallback) common.OpenAPIDefinition { + return common.OpenAPIDefinition{ + Schema: spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: map[string]spec.Schema{ + "cron": { + SchemaProps: spec.SchemaProps{ + Description: "user in no scm job", + Type: []string{"string"}, + Format: "", + }, + }, + "interval": { + SchemaProps: spec.SchemaProps{ + Description: "use in multi-branch job", + Type: []string{"string"}, + Format: "", + }, + }, + }, + }, + }, + } +} diff --git a/pkg/apis/devops/v1alpha3/pipeline_types.go b/pkg/apis/devops/v1alpha3/pipeline_types.go new file mode 100644 index 000000000..34ea5161c --- /dev/null +++ b/pkg/apis/devops/v1alpha3/pipeline_types.go @@ -0,0 +1,192 @@ +/* +Copyright 2019 The KubeSphere authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package v1alpha3 + +import ( + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +// EDIT THIS FILE! THIS IS SCAFFOLDING FOR YOU TO OWN! +// NOTE: json tags are required. Any new fields you add must have json tags for the fields to be serialized. 
+ +const PipelineFinalizerName = "pipeline.finalizers.kubesphere.io" + +const ( + ResourceKindPipeline = "Pipeline" + ResourceSingularPipeline = "pipeline" + ResourcePluralPipeline = "pipelines" +) + +// PipelineSpec defines the desired state of Pipeline +type PipelineSpec struct { + // INSERT ADDITIONAL SPEC FIELDS - desired state of cluster + // Important: Run "make" to regenerate code after modifying this file + Type string `json:"type" description:"type of devops pipeline, in scm or no scm"` + Pipeline *NoScmPipeline `json:"pipeline,omitempty" description:"no scm pipeline structs"` + MultiBranchPipeline *MultiBranchPipeline `json:"multi_branch_pipeline,omitempty" description:"in scm pipeline structs"` +} + +// PipelineStatus defines the observed state of Pipeline +type PipelineStatus struct { + // INSERT ADDITIONAL STATUS FIELD - define observed state of cluster + // Important: Run "make" to regenerate code after modifying this file +} + +// +genclient +// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object + +// Pipeline is the Schema for the pipelines API +// +k8s:openapi-gen=true +type Pipeline struct { + metav1.TypeMeta `json:",inline"` + metav1.ObjectMeta `json:"metadata,omitempty"` + + Spec PipelineSpec `json:"spec,omitempty"` + Status PipelineStatus `json:"status,omitempty"` +} + +// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object + +// PipelineList contains a list of Pipeline +type PipelineList struct { + metav1.TypeMeta `json:",inline"` + metav1.ListMeta `json:"metadata,omitempty"` + Items []Pipeline `json:"items"` +} + +func init() { + SchemeBuilder.Register(&Pipeline{}, &PipelineList{}) +} + +const ( + NoScmPipelineType = "pipeline" + MultiBranchPipelineType = "multi-branch-pipeline" +) + +type NoScmPipeline struct { + Name string `json:"name" description:"name of pipeline"` + Description string `json:"descriptio,omitempty" description:"description of pipeline"` + Discarder *DiscarderProperty 
`json:"discarder,omitempty" description:"Discarder of pipeline, managing when to drop a pipeline"` + Parameters []Parameter `json:"parameters,omitempty" description:"Parameters define of pipeline,user could pass param when run pipeline"` + DisableConcurrent bool `json:"disable_concurrent,omitempty" mapstructure:"disable_concurrent" description:"Whether to prohibit the pipeline from running in parallel"` + TimerTrigger *TimerTrigger `json:"timer_trigger,omitempty" mapstructure:"timer_trigger" description:"Timer to trigger pipeline run"` + RemoteTrigger *RemoteTrigger `json:"remote_trigger,omitempty" mapstructure:"remote_trigger" description:"Remote api define to trigger pipeline run"` + Jenkinsfile string `json:"jenkinsfile,omitempty" description:"Jenkinsfile's content'"` +} + +type MultiBranchPipeline struct { + Name string `json:"name" description:"name of pipeline"` + Description string `json:"descriptio,omitempty" description:"description of pipeline"` + Discarder *DiscarderProperty `json:"discarder,omitempty" description:"Discarder of pipeline, managing when to drop a pipeline"` + TimerTrigger *TimerTrigger `json:"timer_trigger,omitempty" mapstructure:"timer_trigger" description:"Timer to trigger pipeline run"` + SourceType string `json:"source_type" description:"type of scm, such as github/git/svn"` + GitSource *GitSource `json:"git_source,omitempty" description:"git scm define"` + GitHubSource *GithubSource `json:"github_source,omitempty" description:"github scm define"` + SvnSource *SvnSource `json:"svn_source,omitempty" description:"multi branch svn scm define"` + SingleSvnSource *SingleSvnSource `json:"single_svn_source,omitempty" description:"single branch svn scm define"` + BitbucketServerSource *BitbucketServerSource `json:"bitbucket_server_source,omitempty" description:"bitbucket server scm defile"` + ScriptPath string `json:"script_path" mapstructure:"script_path" description:"script path in scm"` + MultiBranchJobTrigger *MultiBranchJobTrigger 
`json:"multibranch_job_trigger,omitempty" mapstructure:"multibranch_job_trigger" description:"Pipeline tasks that need to be triggered when branch creation/deletion"` +} + +type GitSource struct { + ScmId string `json:"scm_id,omitempty" description:"uid of scm"` + Url string `json:"url,omitempty" mapstructure:"url" description:"url of git source"` + CredentialId string `json:"credential_id,omitempty" mapstructure:"credential_id" description:"credential id to access git source"` + DiscoverBranches bool `json:"discover_branches,omitempty" mapstructure:"discover_branches" description:"Whether to discover a branch"` + CloneOption *GitCloneOption `json:"git_clone_option,omitempty" mapstructure:"git_clone_option" description:"advavced git clone options"` + RegexFilter string `json:"regex_filter,omitempty" mapstructure:"regex_filter" description:"Regex used to match the name of the branch that needs to be run"` +} + +type GithubSource struct { + ScmId string `json:"scm_id,omitempty" description:"uid of scm"` + Owner string `json:"owner,omitempty" mapstructure:"owner" description:"owner of github repo"` + Repo string `json:"repo,omitempty" mapstructure:"repo" description:"repo name of github repo"` + CredentialId string `json:"credential_id,omitempty" mapstructure:"credential_id" description:"credential id to access github source"` + ApiUri string `json:"api_uri,omitempty" mapstructure:"api_uri" description:"The api url can specify the location of the github apiserver.For private cloud configuration"` + DiscoverBranches int `json:"discover_branches,omitempty" mapstructure:"discover_branches" description:"Discover branch configuration"` + DiscoverPRFromOrigin int `json:"discover_pr_from_origin,omitempty" mapstructure:"discover_pr_from_origin" description:"Discover origin PR configuration"` + DiscoverPRFromForks *DiscoverPRFromForks `json:"discover_pr_from_forks,omitempty" mapstructure:"discover_pr_from_forks" description:"Discover fork PR configuration"` + CloneOption 
*GitCloneOption `json:"git_clone_option,omitempty" mapstructure:"git_clone_option" description:"advavced git clone options"` + RegexFilter string `json:"regex_filter,omitempty" mapstructure:"regex_filter" description:"Regex used to match the name of the branch that needs to be run"` +} + +type MultiBranchJobTrigger struct { + CreateActionJobsToTrigger string `json:"create_action_job_to_trigger,omitempty" description:"pipeline name to trigger"` + DeleteActionJobsToTrigger string `json:"delete_action_job_to_trigger,omitempty" description:"pipeline name to trigger"` +} + +type BitbucketServerSource struct { + ScmId string `json:"scm_id,omitempty" description:"uid of scm"` + Owner string `json:"owner,omitempty" mapstructure:"owner" description:"owner of github repo"` + Repo string `json:"repo,omitempty" mapstructure:"repo" description:"repo name of github repo"` + CredentialId string `json:"credential_id,omitempty" mapstructure:"credential_id" description:"credential id to access github source"` + ApiUri string `json:"api_uri,omitempty" mapstructure:"api_uri" description:"The api url can specify the location of the github apiserver.For private cloud configuration"` + DiscoverBranches int `json:"discover_branches,omitempty" mapstructure:"discover_branches" description:"Discover branch configuration"` + DiscoverPRFromOrigin int `json:"discover_pr_from_origin,omitempty" mapstructure:"discover_pr_from_origin" description:"Discover origin PR configuration"` + DiscoverPRFromForks *DiscoverPRFromForks `json:"discover_pr_from_forks,omitempty" mapstructure:"discover_pr_from_forks" description:"Discover fork PR configuration"` + CloneOption *GitCloneOption `json:"git_clone_option,omitempty" mapstructure:"git_clone_option" description:"advavced git clone options"` + RegexFilter string `json:"regex_filter,omitempty" mapstructure:"regex_filter" description:"Regex used to match the name of the branch that needs to be run"` +} + +type GitCloneOption struct { + Shallow bool 
`json:"shallow,omitempty" mapstructure:"shallow" description:"Whether to use git shallow clone"` + Timeout int `json:"timeout,omitempty" mapstructure:"timeout" description:"git clone timeout mins"` + Depth int `json:"depth,omitempty" mapstructure:"depth" description:"git clone depth"` +} + +type SvnSource struct { + ScmId string `json:"scm_id,omitempty" description:"uid of scm"` + Remote string `json:"remote,omitempty" description:"remote address url"` + CredentialId string `json:"credential_id,omitempty" mapstructure:"credential_id" description:"credential id to access svn source"` + Includes string `json:"includes,omitempty" description:"branches to run pipeline"` + Excludes string `json:"excludes,omitempty" description:"branches do not run pipeline"` +} +type SingleSvnSource struct { + ScmId string `json:"scm_id,omitempty" description:"uid of scm"` + Remote string `json:"remote,omitempty" description:"remote address url"` + CredentialId string `json:"credential_id,omitempty" mapstructure:"credential_id" description:"credential id to access svn source"` +} + +type DiscoverPRFromForks struct { + Strategy int `json:"strategy,omitempty" mapstructure:"strategy" description:"github discover strategy"` + Trust int `json:"trust,omitempty" mapstructure:"trust" description:"trust user type"` +} + +type DiscarderProperty struct { + DaysToKeep string `json:"days_to_keep,omitempty" mapstructure:"days_to_keep" description:"days to keep pipeline"` + NumToKeep string `json:"num_to_keep,omitempty" mapstructure:"num_to_keep" description:"nums to keep pipeline"` +} + +type Parameter struct { + Name string `json:"name" description:"name of param"` + DefaultValue string `json:"default_value,omitempty" mapstructure:"default_value" description:"default value of param"` + Type string `json:"type" description:"type of param"` + Description string `json:"description,omitempty" description:"description of pipeline"` +} + +type TimerTrigger struct { + // use in no scm job + Cron string 
`json:"cron,omitempty" description:"jenkins cron script"` + + // use in multi-branch job + Interval string `json:"interval,omitempty" description:"interval ms"` +} + +type RemoteTrigger struct { + Token string `json:"token,omitempty" description:"remote trigger token"` +} diff --git a/pkg/apis/devops/v1alpha3/pipeline_types_test.go b/pkg/apis/devops/v1alpha3/pipeline_types_test.go new file mode 100644 index 000000000..f8c435fe6 --- /dev/null +++ b/pkg/apis/devops/v1alpha3/pipeline_types_test.go @@ -0,0 +1,58 @@ +/* +Copyright 2019 The KubeSphere authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package v1alpha3 + +import ( + "testing" + + "github.com/onsi/gomega" + "golang.org/x/net/context" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/types" +) + +func TestStoragePipeline(t *testing.T) { + key := types.NamespacedName{ + Name: "foo", + Namespace: "default", + } + created := &Pipeline{ + ObjectMeta: metav1.ObjectMeta{ + Name: "foo", + Namespace: "default", + }} + g := gomega.NewGomegaWithT(t) + + // Test Create + fetched := &Pipeline{} + g.Expect(c.Create(context.TODO(), created)).To(gomega.Succeed()) + + g.Expect(c.Get(context.TODO(), key, fetched)).To(gomega.Succeed()) + g.Expect(fetched).To(gomega.Equal(created)) + + // Test Updating the Labels + updated := fetched.DeepCopy() + updated.Labels = map[string]string{"hello": "world"} + g.Expect(c.Update(context.TODO(), updated)).To(gomega.Succeed()) + + g.Expect(c.Get(context.TODO(), key, fetched)).To(gomega.Succeed()) + g.Expect(fetched).To(gomega.Equal(updated)) + + // Test Delete + g.Expect(c.Delete(context.TODO(), fetched)).To(gomega.Succeed()) + g.Expect(c.Get(context.TODO(), key, fetched)).ToNot(gomega.Succeed()) +} diff --git a/pkg/apis/devops/v1alpha3/zz_generated.deepcopy.go b/pkg/apis/devops/v1alpha3/zz_generated.deepcopy.go index 416ab132f..3f7dd8c4f 100644 --- a/pkg/apis/devops/v1alpha3/zz_generated.deepcopy.go +++ b/pkg/apis/devops/v1alpha3/zz_generated.deepcopy.go @@ -24,6 +24,31 @@ import ( runtime "k8s.io/apimachinery/pkg/runtime" ) +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *BitbucketServerSource) DeepCopyInto(out *BitbucketServerSource) { + *out = *in + if in.DiscoverPRFromForks != nil { + in, out := &in.DiscoverPRFromForks, &out.DiscoverPRFromForks + *out = new(DiscoverPRFromForks) + **out = **in + } + if in.CloneOption != nil { + in, out := &in.CloneOption, &out.CloneOption + *out = new(GitCloneOption) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BitbucketServerSource. +func (in *BitbucketServerSource) DeepCopy() *BitbucketServerSource { + if in == nil { + return nil + } + out := new(BitbucketServerSource) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *DevOpsProject) DeepCopyInto(out *DevOpsProject) { *out = *in @@ -112,3 +137,372 @@ func (in *DevOpsProjectStatus) DeepCopy() *DevOpsProjectStatus { in.DeepCopyInto(out) return out } + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DiscarderProperty) DeepCopyInto(out *DiscarderProperty) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DiscarderProperty. +func (in *DiscarderProperty) DeepCopy() *DiscarderProperty { + if in == nil { + return nil + } + out := new(DiscarderProperty) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DiscoverPRFromForks) DeepCopyInto(out *DiscoverPRFromForks) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DiscoverPRFromForks. 
+func (in *DiscoverPRFromForks) DeepCopy() *DiscoverPRFromForks { + if in == nil { + return nil + } + out := new(DiscoverPRFromForks) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *GitCloneOption) DeepCopyInto(out *GitCloneOption) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GitCloneOption. +func (in *GitCloneOption) DeepCopy() *GitCloneOption { + if in == nil { + return nil + } + out := new(GitCloneOption) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *GitSource) DeepCopyInto(out *GitSource) { + *out = *in + if in.CloneOption != nil { + in, out := &in.CloneOption, &out.CloneOption + *out = new(GitCloneOption) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GitSource. +func (in *GitSource) DeepCopy() *GitSource { + if in == nil { + return nil + } + out := new(GitSource) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *GithubSource) DeepCopyInto(out *GithubSource) { + *out = *in + if in.DiscoverPRFromForks != nil { + in, out := &in.DiscoverPRFromForks, &out.DiscoverPRFromForks + *out = new(DiscoverPRFromForks) + **out = **in + } + if in.CloneOption != nil { + in, out := &in.CloneOption, &out.CloneOption + *out = new(GitCloneOption) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GithubSource. 
+func (in *GithubSource) DeepCopy() *GithubSource { + if in == nil { + return nil + } + out := new(GithubSource) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *MultiBranchJobTrigger) DeepCopyInto(out *MultiBranchJobTrigger) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MultiBranchJobTrigger. +func (in *MultiBranchJobTrigger) DeepCopy() *MultiBranchJobTrigger { + if in == nil { + return nil + } + out := new(MultiBranchJobTrigger) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *MultiBranchPipeline) DeepCopyInto(out *MultiBranchPipeline) { + *out = *in + if in.Discarder != nil { + in, out := &in.Discarder, &out.Discarder + *out = new(DiscarderProperty) + **out = **in + } + if in.TimerTrigger != nil { + in, out := &in.TimerTrigger, &out.TimerTrigger + *out = new(TimerTrigger) + **out = **in + } + if in.GitSource != nil { + in, out := &in.GitSource, &out.GitSource + *out = new(GitSource) + (*in).DeepCopyInto(*out) + } + if in.GitHubSource != nil { + in, out := &in.GitHubSource, &out.GitHubSource + *out = new(GithubSource) + (*in).DeepCopyInto(*out) + } + if in.SvnSource != nil { + in, out := &in.SvnSource, &out.SvnSource + *out = new(SvnSource) + **out = **in + } + if in.SingleSvnSource != nil { + in, out := &in.SingleSvnSource, &out.SingleSvnSource + *out = new(SingleSvnSource) + **out = **in + } + if in.BitbucketServerSource != nil { + in, out := &in.BitbucketServerSource, &out.BitbucketServerSource + *out = new(BitbucketServerSource) + (*in).DeepCopyInto(*out) + } + if in.MultiBranchJobTrigger != nil { + in, out := &in.MultiBranchJobTrigger, &out.MultiBranchJobTrigger + *out = new(MultiBranchJobTrigger) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy 
function, copying the receiver, creating a new MultiBranchPipeline. +func (in *MultiBranchPipeline) DeepCopy() *MultiBranchPipeline { + if in == nil { + return nil + } + out := new(MultiBranchPipeline) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *NoScmPipeline) DeepCopyInto(out *NoScmPipeline) { + *out = *in + if in.Discarder != nil { + in, out := &in.Discarder, &out.Discarder + *out = new(DiscarderProperty) + **out = **in + } + if in.Parameters != nil { + in, out := &in.Parameters, &out.Parameters + *out = make([]Parameter, len(*in)) + copy(*out, *in) + } + if in.TimerTrigger != nil { + in, out := &in.TimerTrigger, &out.TimerTrigger + *out = new(TimerTrigger) + **out = **in + } + if in.RemoteTrigger != nil { + in, out := &in.RemoteTrigger, &out.RemoteTrigger + *out = new(RemoteTrigger) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NoScmPipeline. +func (in *NoScmPipeline) DeepCopy() *NoScmPipeline { + if in == nil { + return nil + } + out := new(NoScmPipeline) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Parameter) DeepCopyInto(out *Parameter) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Parameter. +func (in *Parameter) DeepCopy() *Parameter { + if in == nil { + return nil + } + out := new(Parameter) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *Pipeline) DeepCopyInto(out *Pipeline) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) + out.Status = in.Status +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Pipeline. +func (in *Pipeline) DeepCopy() *Pipeline { + if in == nil { + return nil + } + out := new(Pipeline) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *Pipeline) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PipelineList) DeepCopyInto(out *PipelineList) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ListMeta.DeepCopyInto(&out.ListMeta) + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]Pipeline, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineList. +func (in *PipelineList) DeepCopy() *PipelineList { + if in == nil { + return nil + } + out := new(PipelineList) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *PipelineList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *PipelineSpec) DeepCopyInto(out *PipelineSpec) { + *out = *in + if in.Pipeline != nil { + in, out := &in.Pipeline, &out.Pipeline + *out = new(NoScmPipeline) + (*in).DeepCopyInto(*out) + } + if in.MultiBranchPipeline != nil { + in, out := &in.MultiBranchPipeline, &out.MultiBranchPipeline + *out = new(MultiBranchPipeline) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineSpec. +func (in *PipelineSpec) DeepCopy() *PipelineSpec { + if in == nil { + return nil + } + out := new(PipelineSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PipelineStatus) DeepCopyInto(out *PipelineStatus) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineStatus. +func (in *PipelineStatus) DeepCopy() *PipelineStatus { + if in == nil { + return nil + } + out := new(PipelineStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *RemoteTrigger) DeepCopyInto(out *RemoteTrigger) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RemoteTrigger. +func (in *RemoteTrigger) DeepCopy() *RemoteTrigger { + if in == nil { + return nil + } + out := new(RemoteTrigger) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SingleSvnSource) DeepCopyInto(out *SingleSvnSource) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SingleSvnSource. 
+func (in *SingleSvnSource) DeepCopy() *SingleSvnSource { + if in == nil { + return nil + } + out := new(SingleSvnSource) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SvnSource) DeepCopyInto(out *SvnSource) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SvnSource. +func (in *SvnSource) DeepCopy() *SvnSource { + if in == nil { + return nil + } + out := new(SvnSource) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *TimerTrigger) DeepCopyInto(out *TimerTrigger) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TimerTrigger. +func (in *TimerTrigger) DeepCopy() *TimerTrigger { + if in == nil { + return nil + } + out := new(TimerTrigger) + in.DeepCopyInto(out) + return out +} diff --git a/pkg/client/clientset/versioned/typed/devops/v1alpha3/devops_client.go b/pkg/client/clientset/versioned/typed/devops/v1alpha3/devops_client.go index b864758e5..8884644db 100644 --- a/pkg/client/clientset/versioned/typed/devops/v1alpha3/devops_client.go +++ b/pkg/client/clientset/versioned/typed/devops/v1alpha3/devops_client.go @@ -27,6 +27,7 @@ import ( type DevopsV1alpha3Interface interface { RESTClient() rest.Interface DevOpsProjectsGetter + PipelinesGetter } // DevopsV1alpha3Client is used to interact with features provided by the devops.kubesphere.io group. @@ -38,6 +39,10 @@ func (c *DevopsV1alpha3Client) DevOpsProjects() DevOpsProjectInterface { return newDevOpsProjects(c) } +func (c *DevopsV1alpha3Client) Pipelines(namespace string) PipelineInterface { + return newPipelines(c, namespace) +} + // NewForConfig creates a new DevopsV1alpha3Client for the given config. 
func NewForConfig(c *rest.Config) (*DevopsV1alpha3Client, error) { config := *c diff --git a/pkg/client/clientset/versioned/typed/devops/v1alpha3/fake/fake_devops_client.go b/pkg/client/clientset/versioned/typed/devops/v1alpha3/fake/fake_devops_client.go index e17f6e60c..ba7a1ebc0 100644 --- a/pkg/client/clientset/versioned/typed/devops/v1alpha3/fake/fake_devops_client.go +++ b/pkg/client/clientset/versioned/typed/devops/v1alpha3/fake/fake_devops_client.go @@ -32,6 +32,10 @@ func (c *FakeDevopsV1alpha3) DevOpsProjects() v1alpha3.DevOpsProjectInterface { return &FakeDevOpsProjects{c} } +func (c *FakeDevopsV1alpha3) Pipelines(namespace string) v1alpha3.PipelineInterface { + return &FakePipelines{c, namespace} +} + // RESTClient returns a RESTClient that is used to communicate // with API server by this client implementation. func (c *FakeDevopsV1alpha3) RESTClient() rest.Interface { diff --git a/pkg/client/clientset/versioned/typed/devops/v1alpha3/fake/fake_pipeline.go b/pkg/client/clientset/versioned/typed/devops/v1alpha3/fake/fake_pipeline.go new file mode 100644 index 000000000..5622bc57f --- /dev/null +++ b/pkg/client/clientset/versioned/typed/devops/v1alpha3/fake/fake_pipeline.go @@ -0,0 +1,140 @@ +/* +Copyright 2019 The KubeSphere authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by client-gen. DO NOT EDIT. 
+ +package fake + +import ( + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + labels "k8s.io/apimachinery/pkg/labels" + schema "k8s.io/apimachinery/pkg/runtime/schema" + types "k8s.io/apimachinery/pkg/types" + watch "k8s.io/apimachinery/pkg/watch" + testing "k8s.io/client-go/testing" + v1alpha3 "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3" +) + +// FakePipelines implements PipelineInterface +type FakePipelines struct { + Fake *FakeDevopsV1alpha3 + ns string +} + +var pipelinesResource = schema.GroupVersionResource{Group: "devops.kubesphere.io", Version: "v1alpha3", Resource: "pipelines"} + +var pipelinesKind = schema.GroupVersionKind{Group: "devops.kubesphere.io", Version: "v1alpha3", Kind: "Pipeline"} + +// Get takes name of the pipeline, and returns the corresponding pipeline object, and an error if there is any. +func (c *FakePipelines) Get(name string, options v1.GetOptions) (result *v1alpha3.Pipeline, err error) { + obj, err := c.Fake. + Invokes(testing.NewGetAction(pipelinesResource, c.ns, name), &v1alpha3.Pipeline{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha3.Pipeline), err +} + +// List takes label and field selectors, and returns the list of Pipelines that match those selectors. +func (c *FakePipelines) List(opts v1.ListOptions) (result *v1alpha3.PipelineList, err error) { + obj, err := c.Fake. + Invokes(testing.NewListAction(pipelinesResource, pipelinesKind, c.ns, opts), &v1alpha3.PipelineList{}) + + if obj == nil { + return nil, err + } + + label, _, _ := testing.ExtractFromListOptions(opts) + if label == nil { + label = labels.Everything() + } + list := &v1alpha3.PipelineList{ListMeta: obj.(*v1alpha3.PipelineList).ListMeta} + for _, item := range obj.(*v1alpha3.PipelineList).Items { + if label.Matches(labels.Set(item.Labels)) { + list.Items = append(list.Items, item) + } + } + return list, err +} + +// Watch returns a watch.Interface that watches the requested pipelines. 
+func (c *FakePipelines) Watch(opts v1.ListOptions) (watch.Interface, error) { + return c.Fake. + InvokesWatch(testing.NewWatchAction(pipelinesResource, c.ns, opts)) + +} + +// Create takes the representation of a pipeline and creates it. Returns the server's representation of the pipeline, and an error, if there is any. +func (c *FakePipelines) Create(pipeline *v1alpha3.Pipeline) (result *v1alpha3.Pipeline, err error) { + obj, err := c.Fake. + Invokes(testing.NewCreateAction(pipelinesResource, c.ns, pipeline), &v1alpha3.Pipeline{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha3.Pipeline), err +} + +// Update takes the representation of a pipeline and updates it. Returns the server's representation of the pipeline, and an error, if there is any. +func (c *FakePipelines) Update(pipeline *v1alpha3.Pipeline) (result *v1alpha3.Pipeline, err error) { + obj, err := c.Fake. + Invokes(testing.NewUpdateAction(pipelinesResource, c.ns, pipeline), &v1alpha3.Pipeline{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha3.Pipeline), err +} + +// UpdateStatus was generated because the type contains a Status member. +// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). +func (c *FakePipelines) UpdateStatus(pipeline *v1alpha3.Pipeline) (*v1alpha3.Pipeline, error) { + obj, err := c.Fake. + Invokes(testing.NewUpdateSubresourceAction(pipelinesResource, "status", c.ns, pipeline), &v1alpha3.Pipeline{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha3.Pipeline), err +} + +// Delete takes name of the pipeline and deletes it. Returns an error if one occurs. +func (c *FakePipelines) Delete(name string, options *v1.DeleteOptions) error { + _, err := c.Fake. + Invokes(testing.NewDeleteAction(pipelinesResource, c.ns, name), &v1alpha3.Pipeline{}) + + return err +} + +// DeleteCollection deletes a collection of objects. 
+func (c *FakePipelines) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error { + action := testing.NewDeleteCollectionAction(pipelinesResource, c.ns, listOptions) + + _, err := c.Fake.Invokes(action, &v1alpha3.PipelineList{}) + return err +} + +// Patch applies the patch and returns the patched pipeline. +func (c *FakePipelines) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1alpha3.Pipeline, err error) { + obj, err := c.Fake. + Invokes(testing.NewPatchSubresourceAction(pipelinesResource, c.ns, name, pt, data, subresources...), &v1alpha3.Pipeline{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha3.Pipeline), err +} diff --git a/pkg/client/clientset/versioned/typed/devops/v1alpha3/generated_expansion.go b/pkg/client/clientset/versioned/typed/devops/v1alpha3/generated_expansion.go index 24c098083..e854e495d 100644 --- a/pkg/client/clientset/versioned/typed/devops/v1alpha3/generated_expansion.go +++ b/pkg/client/clientset/versioned/typed/devops/v1alpha3/generated_expansion.go @@ -19,3 +19,5 @@ limitations under the License. package v1alpha3 type DevOpsProjectExpansion interface{} + +type PipelineExpansion interface{} diff --git a/pkg/client/clientset/versioned/typed/devops/v1alpha3/pipeline.go b/pkg/client/clientset/versioned/typed/devops/v1alpha3/pipeline.go new file mode 100644 index 000000000..675e4bfbf --- /dev/null +++ b/pkg/client/clientset/versioned/typed/devops/v1alpha3/pipeline.go @@ -0,0 +1,191 @@ +/* +Copyright 2019 The KubeSphere authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by client-gen. DO NOT EDIT. + +package v1alpha3 + +import ( + "time" + + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + types "k8s.io/apimachinery/pkg/types" + watch "k8s.io/apimachinery/pkg/watch" + rest "k8s.io/client-go/rest" + v1alpha3 "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3" + scheme "kubesphere.io/kubesphere/pkg/client/clientset/versioned/scheme" +) + +// PipelinesGetter has a method to return a PipelineInterface. +// A group's client should implement this interface. +type PipelinesGetter interface { + Pipelines(namespace string) PipelineInterface +} + +// PipelineInterface has methods to work with Pipeline resources. +type PipelineInterface interface { + Create(*v1alpha3.Pipeline) (*v1alpha3.Pipeline, error) + Update(*v1alpha3.Pipeline) (*v1alpha3.Pipeline, error) + UpdateStatus(*v1alpha3.Pipeline) (*v1alpha3.Pipeline, error) + Delete(name string, options *v1.DeleteOptions) error + DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error + Get(name string, options v1.GetOptions) (*v1alpha3.Pipeline, error) + List(opts v1.ListOptions) (*v1alpha3.PipelineList, error) + Watch(opts v1.ListOptions) (watch.Interface, error) + Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1alpha3.Pipeline, err error) + PipelineExpansion +} + +// pipelines implements PipelineInterface +type pipelines struct { + client rest.Interface + ns string +} + +// newPipelines returns a Pipelines +func newPipelines(c *DevopsV1alpha3Client, namespace string) *pipelines { + return &pipelines{ + client: c.RESTClient(), + ns: namespace, + } +} + +// Get takes name of the pipeline, and returns the corresponding pipeline object, and an error if there is any. 
+func (c *pipelines) Get(name string, options v1.GetOptions) (result *v1alpha3.Pipeline, err error) { + result = &v1alpha3.Pipeline{} + err = c.client.Get(). + Namespace(c.ns). + Resource("pipelines"). + Name(name). + VersionedParams(&options, scheme.ParameterCodec). + Do(). + Into(result) + return +} + +// List takes label and field selectors, and returns the list of Pipelines that match those selectors. +func (c *pipelines) List(opts v1.ListOptions) (result *v1alpha3.PipelineList, err error) { + var timeout time.Duration + if opts.TimeoutSeconds != nil { + timeout = time.Duration(*opts.TimeoutSeconds) * time.Second + } + result = &v1alpha3.PipelineList{} + err = c.client.Get(). + Namespace(c.ns). + Resource("pipelines"). + VersionedParams(&opts, scheme.ParameterCodec). + Timeout(timeout). + Do(). + Into(result) + return +} + +// Watch returns a watch.Interface that watches the requested pipelines. +func (c *pipelines) Watch(opts v1.ListOptions) (watch.Interface, error) { + var timeout time.Duration + if opts.TimeoutSeconds != nil { + timeout = time.Duration(*opts.TimeoutSeconds) * time.Second + } + opts.Watch = true + return c.client.Get(). + Namespace(c.ns). + Resource("pipelines"). + VersionedParams(&opts, scheme.ParameterCodec). + Timeout(timeout). + Watch() +} + +// Create takes the representation of a pipeline and creates it. Returns the server's representation of the pipeline, and an error, if there is any. +func (c *pipelines) Create(pipeline *v1alpha3.Pipeline) (result *v1alpha3.Pipeline, err error) { + result = &v1alpha3.Pipeline{} + err = c.client.Post(). + Namespace(c.ns). + Resource("pipelines"). + Body(pipeline). + Do(). + Into(result) + return +} + +// Update takes the representation of a pipeline and updates it. Returns the server's representation of the pipeline, and an error, if there is any. +func (c *pipelines) Update(pipeline *v1alpha3.Pipeline) (result *v1alpha3.Pipeline, err error) { + result = &v1alpha3.Pipeline{} + err = c.client.Put(). 
+ Namespace(c.ns). + Resource("pipelines"). + Name(pipeline.Name). + Body(pipeline). + Do(). + Into(result) + return +} + +// UpdateStatus was generated because the type contains a Status member. +// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). + +func (c *pipelines) UpdateStatus(pipeline *v1alpha3.Pipeline) (result *v1alpha3.Pipeline, err error) { + result = &v1alpha3.Pipeline{} + err = c.client.Put(). + Namespace(c.ns). + Resource("pipelines"). + Name(pipeline.Name). + SubResource("status"). + Body(pipeline). + Do(). + Into(result) + return +} + +// Delete takes name of the pipeline and deletes it. Returns an error if one occurs. +func (c *pipelines) Delete(name string, options *v1.DeleteOptions) error { + return c.client.Delete(). + Namespace(c.ns). + Resource("pipelines"). + Name(name). + Body(options). + Do(). + Error() +} + +// DeleteCollection deletes a collection of objects. +func (c *pipelines) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error { + var timeout time.Duration + if listOptions.TimeoutSeconds != nil { + timeout = time.Duration(*listOptions.TimeoutSeconds) * time.Second + } + return c.client.Delete(). + Namespace(c.ns). + Resource("pipelines"). + VersionedParams(&listOptions, scheme.ParameterCodec). + Timeout(timeout). + Body(options). + Do(). + Error() +} + +// Patch applies the patch and returns the patched pipeline. +func (c *pipelines) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1alpha3.Pipeline, err error) { + result = &v1alpha3.Pipeline{} + err = c.client.Patch(pt). + Namespace(c.ns). + Resource("pipelines"). + SubResource(subresources...). + Name(name). + Body(data). + Do(). 
+ Into(result) + return +} diff --git a/pkg/client/informers/externalversions/devops/v1alpha3/interface.go b/pkg/client/informers/externalversions/devops/v1alpha3/interface.go index 91d7e6e02..90b80745a 100644 --- a/pkg/client/informers/externalversions/devops/v1alpha3/interface.go +++ b/pkg/client/informers/externalversions/devops/v1alpha3/interface.go @@ -26,6 +26,8 @@ import ( type Interface interface { // DevOpsProjects returns a DevOpsProjectInformer. DevOpsProjects() DevOpsProjectInformer + // Pipelines returns a PipelineInformer. + Pipelines() PipelineInformer } type version struct { @@ -43,3 +45,8 @@ func New(f internalinterfaces.SharedInformerFactory, namespace string, tweakList func (v *version) DevOpsProjects() DevOpsProjectInformer { return &devOpsProjectInformer{factory: v.factory, tweakListOptions: v.tweakListOptions} } + +// Pipelines returns a PipelineInformer. +func (v *version) Pipelines() PipelineInformer { + return &pipelineInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions} +} diff --git a/pkg/client/informers/externalversions/devops/v1alpha3/pipeline.go b/pkg/client/informers/externalversions/devops/v1alpha3/pipeline.go new file mode 100644 index 000000000..b1b6cce96 --- /dev/null +++ b/pkg/client/informers/externalversions/devops/v1alpha3/pipeline.go @@ -0,0 +1,89 @@ +/* +Copyright 2019 The KubeSphere authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by informer-gen. DO NOT EDIT. 
+ +package v1alpha3 + +import ( + time "time" + + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + runtime "k8s.io/apimachinery/pkg/runtime" + watch "k8s.io/apimachinery/pkg/watch" + cache "k8s.io/client-go/tools/cache" + devopsv1alpha3 "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3" + versioned "kubesphere.io/kubesphere/pkg/client/clientset/versioned" + internalinterfaces "kubesphere.io/kubesphere/pkg/client/informers/externalversions/internalinterfaces" + v1alpha3 "kubesphere.io/kubesphere/pkg/client/listers/devops/v1alpha3" +) + +// PipelineInformer provides access to a shared informer and lister for +// Pipelines. +type PipelineInformer interface { + Informer() cache.SharedIndexInformer + Lister() v1alpha3.PipelineLister +} + +type pipelineInformer struct { + factory internalinterfaces.SharedInformerFactory + tweakListOptions internalinterfaces.TweakListOptionsFunc + namespace string +} + +// NewPipelineInformer constructs a new informer for Pipeline type. +// Always prefer using an informer factory to get a shared informer instead of getting an independent +// one. This reduces memory footprint and number of connections to the server. +func NewPipelineInformer(client versioned.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers) cache.SharedIndexInformer { + return NewFilteredPipelineInformer(client, namespace, resyncPeriod, indexers, nil) +} + +// NewFilteredPipelineInformer constructs a new informer for Pipeline type. +// Always prefer using an informer factory to get a shared informer instead of getting an independent +// one. This reduces memory footprint and number of connections to the server. 
+func NewFilteredPipelineInformer(client versioned.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers, tweakListOptions internalinterfaces.TweakListOptionsFunc) cache.SharedIndexInformer { + return cache.NewSharedIndexInformer( + &cache.ListWatch{ + ListFunc: func(options v1.ListOptions) (runtime.Object, error) { + if tweakListOptions != nil { + tweakListOptions(&options) + } + return client.DevopsV1alpha3().Pipelines(namespace).List(options) + }, + WatchFunc: func(options v1.ListOptions) (watch.Interface, error) { + if tweakListOptions != nil { + tweakListOptions(&options) + } + return client.DevopsV1alpha3().Pipelines(namespace).Watch(options) + }, + }, + &devopsv1alpha3.Pipeline{}, + resyncPeriod, + indexers, + ) +} + +func (f *pipelineInformer) defaultInformer(client versioned.Interface, resyncPeriod time.Duration) cache.SharedIndexInformer { + return NewFilteredPipelineInformer(client, f.namespace, resyncPeriod, cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc}, f.tweakListOptions) +} + +func (f *pipelineInformer) Informer() cache.SharedIndexInformer { + return f.factory.InformerFor(&devopsv1alpha3.Pipeline{}, f.defaultInformer) +} + +func (f *pipelineInformer) Lister() v1alpha3.PipelineLister { + return v1alpha3.NewPipelineLister(f.Informer().GetIndexer()) +} diff --git a/pkg/client/informers/externalversions/generic.go b/pkg/client/informers/externalversions/generic.go index e798db28e..bf122a502 100644 --- a/pkg/client/informers/externalversions/generic.go +++ b/pkg/client/informers/externalversions/generic.go @@ -69,6 +69,8 @@ func (f *sharedInformerFactory) ForResource(resource schema.GroupVersionResource // Group=devops.kubesphere.io, Version=v1alpha3 case v1alpha3.SchemeGroupVersion.WithResource("devopsprojects"): return &genericInformer{resource: resource.GroupResource(), informer: f.Devops().V1alpha3().DevOpsProjects().Informer()}, nil + case v1alpha3.SchemeGroupVersion.WithResource("pipelines"): + return 
&genericInformer{resource: resource.GroupResource(), informer: f.Devops().V1alpha3().Pipelines().Informer()}, nil // Group=network.kubesphere.io, Version=v1alpha1 case networkv1alpha1.SchemeGroupVersion.WithResource("namespacenetworkpolicies"): diff --git a/pkg/client/listers/devops/v1alpha3/expansion_generated.go b/pkg/client/listers/devops/v1alpha3/expansion_generated.go index e7233bd7e..919ab74c3 100644 --- a/pkg/client/listers/devops/v1alpha3/expansion_generated.go +++ b/pkg/client/listers/devops/v1alpha3/expansion_generated.go @@ -21,3 +21,11 @@ package v1alpha3 // DevOpsProjectListerExpansion allows custom methods to be added to // DevOpsProjectLister. type DevOpsProjectListerExpansion interface{} + +// PipelineListerExpansion allows custom methods to be added to +// PipelineLister. +type PipelineListerExpansion interface{} + +// PipelineNamespaceListerExpansion allows custom methods to be added to +// PipelineNamespaceLister. +type PipelineNamespaceListerExpansion interface{} diff --git a/pkg/client/listers/devops/v1alpha3/pipeline.go b/pkg/client/listers/devops/v1alpha3/pipeline.go new file mode 100644 index 000000000..2a26d23b6 --- /dev/null +++ b/pkg/client/listers/devops/v1alpha3/pipeline.go @@ -0,0 +1,94 @@ +/* +Copyright 2019 The KubeSphere authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by lister-gen. DO NOT EDIT. 
+ +package v1alpha3 + +import ( + "k8s.io/apimachinery/pkg/api/errors" + "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/tools/cache" + v1alpha3 "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3" +) + +// PipelineLister helps list Pipelines. +type PipelineLister interface { + // List lists all Pipelines in the indexer. + List(selector labels.Selector) (ret []*v1alpha3.Pipeline, err error) + // Pipelines returns an object that can list and get Pipelines. + Pipelines(namespace string) PipelineNamespaceLister + PipelineListerExpansion +} + +// pipelineLister implements the PipelineLister interface. +type pipelineLister struct { + indexer cache.Indexer +} + +// NewPipelineLister returns a new PipelineLister. +func NewPipelineLister(indexer cache.Indexer) PipelineLister { + return &pipelineLister{indexer: indexer} +} + +// List lists all Pipelines in the indexer. +func (s *pipelineLister) List(selector labels.Selector) (ret []*v1alpha3.Pipeline, err error) { + err = cache.ListAll(s.indexer, selector, func(m interface{}) { + ret = append(ret, m.(*v1alpha3.Pipeline)) + }) + return ret, err +} + +// Pipelines returns an object that can list and get Pipelines. +func (s *pipelineLister) Pipelines(namespace string) PipelineNamespaceLister { + return pipelineNamespaceLister{indexer: s.indexer, namespace: namespace} +} + +// PipelineNamespaceLister helps list and get Pipelines. +type PipelineNamespaceLister interface { + // List lists all Pipelines in the indexer for a given namespace. + List(selector labels.Selector) (ret []*v1alpha3.Pipeline, err error) + // Get retrieves the Pipeline from the indexer for a given namespace and name. + Get(name string) (*v1alpha3.Pipeline, error) + PipelineNamespaceListerExpansion +} + +// pipelineNamespaceLister implements the PipelineNamespaceLister +// interface. +type pipelineNamespaceLister struct { + indexer cache.Indexer + namespace string +} + +// List lists all Pipelines in the indexer for a given namespace. 
+func (s pipelineNamespaceLister) List(selector labels.Selector) (ret []*v1alpha3.Pipeline, err error) { + err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { + ret = append(ret, m.(*v1alpha3.Pipeline)) + }) + return ret, err +} + +// Get retrieves the Pipeline from the indexer for a given namespace and name. +func (s pipelineNamespaceLister) Get(name string) (*v1alpha3.Pipeline, error) { + obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) + if err != nil { + return nil, err + } + if !exists { + return nil, errors.NewNotFound(v1alpha3.Resource("pipeline"), name) + } + return obj.(*v1alpha3.Pipeline), nil +} diff --git a/pkg/controller/pipeline/pipeline_controller.go b/pkg/controller/pipeline/pipeline_controller.go new file mode 100644 index 000000000..418eb1432 --- /dev/null +++ b/pkg/controller/pipeline/pipeline_controller.go @@ -0,0 +1,276 @@ +package pipeline + +import ( + "fmt" + v1 "k8s.io/api/core/v1" + "k8s.io/apimachinery/pkg/api/errors" + utilruntime "k8s.io/apimachinery/pkg/util/runtime" + "k8s.io/apimachinery/pkg/util/wait" + corev1informer "k8s.io/client-go/informers/core/v1" + clientset "k8s.io/client-go/kubernetes" + "k8s.io/client-go/kubernetes/scheme" + v1core "k8s.io/client-go/kubernetes/typed/core/v1" + corev1lister "k8s.io/client-go/listers/core/v1" + "k8s.io/client-go/tools/cache" + "k8s.io/client-go/tools/record" + "k8s.io/client-go/util/workqueue" + "k8s.io/klog" + devopsv1alpha3 "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3" + kubesphereclient "kubesphere.io/kubesphere/pkg/client/clientset/versioned" + devopsinformers "kubesphere.io/kubesphere/pkg/client/informers/externalversions/devops/v1alpha3" + devopslisters "kubesphere.io/kubesphere/pkg/client/listers/devops/v1alpha3" + "kubesphere.io/kubesphere/pkg/constants" + devopsClient "kubesphere.io/kubesphere/pkg/simple/client/devops" + "kubesphere.io/kubesphere/pkg/utils/k8sutil" + "kubesphere.io/kubesphere/pkg/utils/sliceutil" + "net/http" 
+ "reflect" + "time" +) + +/** + Pipeline controller is used to maintain the state of the Pipeline resource. +*/ + +type Controller struct { + client clientset.Interface + kubesphereClient kubesphereclient.Interface + + eventBroadcaster record.EventBroadcaster + eventRecorder record.EventRecorder + + devOpsProjectLister devopslisters.PipelineLister + pipelineSynced cache.InformerSynced + + namespaceLister corev1lister.NamespaceLister + namespaceSynced cache.InformerSynced + + workqueue workqueue.RateLimitingInterface + + workerLoopPeriod time.Duration + + devopsClient devopsClient.Interface +} + +func NewController(client clientset.Interface, + kubesphereClient kubesphereclient.Interface, + devopsClient devopsClient.Interface, + namespaceInformer corev1informer.NamespaceInformer, + devopsInformer devopsinformers.PipelineInformer) *Controller { + + broadcaster := record.NewBroadcaster() + broadcaster.StartLogging(func(format string, args ...interface{}) { + klog.Info(fmt.Sprintf(format, args...)) + }) + broadcaster.StartRecordingToSink(&v1core.EventSinkImpl{Interface: client.CoreV1().Events("")}) + recorder := broadcaster.NewRecorder(scheme.Scheme, v1.EventSource{Component: "pipeline-controller"}) + + v := &Controller{ + client: client, + devopsClient: devopsClient, + kubesphereClient: kubesphereClient, + workqueue: workqueue.NewNamedRateLimitingQueue(workqueue.DefaultControllerRateLimiter(), "pipeline"), + devOpsProjectLister: devopsInformer.Lister(), + pipelineSynced: devopsInformer.Informer().HasSynced, + namespaceLister: namespaceInformer.Lister(), + namespaceSynced: namespaceInformer.Informer().HasSynced, + workerLoopPeriod: time.Second, + } + + v.eventBroadcaster = broadcaster + v.eventRecorder = recorder + + devopsInformer.Informer().AddEventHandler(cache.ResourceEventHandlerFuncs{ + AddFunc: v.enqueuePipeline, + UpdateFunc: func(oldObj, newObj interface{}) { + old := oldObj.(*devopsv1alpha3.Pipeline) + new := newObj.(*devopsv1alpha3.Pipeline) + if 
old.ResourceVersion == new.ResourceVersion { + return + } + v.enqueuePipeline(newObj) + }, + DeleteFunc: v.enqueuePipeline, + }) + return v +} + +// enqueuePipeline takes a Pipeline resource and converts it into a namespace/name +// string which is then put onto the work workqueue. This method should *not* be +// passed resources of any type other than Pipeline. +func (c *Controller) enqueuePipeline(obj interface{}) { + var key string + var err error + if key, err = cache.MetaNamespaceKeyFunc(obj); err != nil { + utilruntime.HandleError(err) + return + } + c.workqueue.Add(key) +} + +func (c *Controller) processNextWorkItem() bool { + obj, shutdown := c.workqueue.Get() + + if shutdown { + return false + } + + err := func(obj interface{}) error { + defer c.workqueue.Done(obj) + var key string + var ok bool + + if key, ok = obj.(string); !ok { + c.workqueue.Forget(obj) + utilruntime.HandleError(fmt.Errorf("expected string in workqueue but got %#v", obj)) + return nil + } + if err := c.syncHandler(key); err != nil { + c.workqueue.AddRateLimited(key) + return fmt.Errorf("error syncing '%s': %s, requeuing", key, err.Error()) + } + c.workqueue.Forget(obj) + klog.V(5).Infof("Successfully synced '%s'", key) + return nil + }(obj) + + if err != nil { + klog.Error(err, "could not reconcile pipeline") + utilruntime.HandleError(err) + return true + } + + return true +} + +func (c *Controller) worker() { + + for c.processNextWorkItem() { + } +} + +func (c *Controller) Start(stopCh <-chan struct{}) error { + return c.Run(1, stopCh) +} + +func (c *Controller) Run(workers int, stopCh <-chan struct{}) error { + defer utilruntime.HandleCrash() + defer c.workqueue.ShutDown() + + klog.Info("starting pipeline controller") + defer klog.Info("shutting down pipeline controller") + + // syncHandler reads from both the pipeline and namespace listers, so both + // caches must be warm before workers start. + if !cache.WaitForCacheSync(stopCh, c.pipelineSynced, c.namespaceSynced) { + return fmt.Errorf("failed to wait for caches to sync") + } + + for i := 0; i < workers; i++ { + go wait.Until(c.worker, c.workerLoopPeriod, stopCh) + 
} + + <-stopCh + return nil +} + +// syncHandler compares the actual state with the desired, and attempts to +// converge the two. It then updates the Status block of the pipeline resource +// with the current status of the resource. +func (c *Controller) syncHandler(key string) error { + nsName, name, err := cache.SplitMetaNamespaceKey(key) + if err != nil { + klog.Error(err, fmt.Sprintf("could not split copyPipeline meta %s ", key)) + return nil + } + namespace, err := c.namespaceLister.Get(nsName) + if err != nil { + if errors.IsNotFound(err) { + klog.Info(fmt.Sprintf("namespace '%s' in work queue no longer exists ", key)) + return nil + } + klog.Error(err, fmt.Sprintf("could not get namespace %s ", key)) + return err + } + if !isDevOpsProjectAdminNamespace(namespace) { + err := fmt.Errorf("could not create copyPipeline in normal namespaces %s", namespace.Name) + klog.Warning(err) + return err + } + + pipeline, err := c.devOpsProjectLister.Pipelines(nsName).Get(name) + if err != nil { + if errors.IsNotFound(err) { + klog.Info(fmt.Sprintf("copyPipeline '%s' in work queue no longer exists ", key)) + return nil + } + klog.Error(err, fmt.Sprintf("could not get copyPipeline %s ", key)) + return err + } + + copyPipeline := pipeline.DeepCopy() + // DeletionTimestamp.IsZero() means copyPipeline has not been deleted. + if copyPipeline.ObjectMeta.DeletionTimestamp.IsZero() { + // https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/#finalizers + if !sliceutil.HasString(copyPipeline.ObjectMeta.Finalizers, devopsv1alpha3.PipelineFinalizerName) { + copyPipeline.ObjectMeta.Finalizers = append(copyPipeline.ObjectMeta.Finalizers, devopsv1alpha3.PipelineFinalizerName) + } + + // Check pipeline config exists, otherwise we will create it. 
+ // if pipeline exists, check & update config + jenkinsPipeline, err := c.devopsClient.GetProjectPipelineConfig(nsName, pipeline.Name) + if err == nil { + if !reflect.DeepEqual(jenkinsPipeline.Spec, copyPipeline.Spec) { + _, err := c.devopsClient.UpdateProjectPipeline(nsName, copyPipeline) + if err != nil { + klog.Error(err, fmt.Sprintf("failed to update pipeline config %s ", key)) + return err + } + } + } else if devopsClient.GetDevOpsStatusCode(err) != http.StatusNotFound { + klog.Error(err, fmt.Sprintf("failed to get copyPipeline %s ", key)) + return err + } else { + _, err := c.devopsClient.CreateProjectPipeline(nsName, copyPipeline) + if err != nil { + klog.Error(err, fmt.Sprintf("failed to create copyPipeline %s ", key)) + return err + } + } + + } else { + // Finalizers processing logic + if sliceutil.HasString(copyPipeline.ObjectMeta.Finalizers, devopsv1alpha3.PipelineFinalizerName) { + _, err := c.devopsClient.GetProjectPipelineConfig(nsName, pipeline.Name) + if err != nil && devopsClient.GetDevOpsStatusCode(err) != http.StatusNotFound { + klog.Error(err, fmt.Sprintf("failed to get pipeline %s ", key)) + return err + } else if err != nil && devopsClient.GetDevOpsStatusCode(err) == http.StatusNotFound { + } else { + if _, err := c.devopsClient.DeleteProjectPipeline(nsName, pipeline.Name); err != nil { + klog.Error(err, fmt.Sprintf("failed to delete pipeline %s in devops", key)) + return err + } + } + // Remove the *pipeline* finalizer (not the DevOps project one) so that + // deletion of the Pipeline object can complete. + copyPipeline.ObjectMeta.Finalizers = sliceutil.RemoveString(copyPipeline.ObjectMeta.Finalizers, func(item string) bool { + return item == devopsv1alpha3.PipelineFinalizerName + }) + + } + } + if !reflect.DeepEqual(pipeline, copyPipeline) { + _, err = c.kubesphereClient.DevopsV1alpha3().Pipelines(nsName).Update(copyPipeline) + if err != nil { + klog.Error(err, fmt.Sprintf("failed to update pipeline %s ", key)) + return err + } + } + + return nil +} + +func isDevOpsProjectAdminNamespace(namespace *v1.Namespace) bool { + _, ok := 
namespace.Labels[constants.DevOpsProjectLabelKey] + + return ok && k8sutil.IsControlledBy(namespace.OwnerReferences, + devopsv1alpha3.ResourceKindDevOpsProject, "") + +} diff --git a/pkg/controller/pipeline/pipeline_controller_test.go b/pkg/controller/pipeline/pipeline_controller_test.go new file mode 100644 index 000000000..f0cda3ca0 --- /dev/null +++ b/pkg/controller/pipeline/pipeline_controller_test.go @@ -0,0 +1,400 @@ +package pipeline + +import ( + v1 "k8s.io/api/core/v1" + "kubesphere.io/kubesphere/pkg/constants" + fakeDevOps "kubesphere.io/kubesphere/pkg/simple/client/devops/fake" + "reflect" + "testing" + "time" + + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/runtime" + "k8s.io/apimachinery/pkg/runtime/schema" + "k8s.io/apimachinery/pkg/util/diff" + kubeinformers "k8s.io/client-go/informers" + k8sfake "k8s.io/client-go/kubernetes/fake" + core "k8s.io/client-go/testing" + "k8s.io/client-go/tools/cache" + "k8s.io/client-go/tools/record" + devops "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3" + "kubesphere.io/kubesphere/pkg/client/clientset/versioned/fake" + informers "kubesphere.io/kubesphere/pkg/client/informers/externalversions" +) + +var ( + alwaysReady = func() bool { return true } + noResyncPeriodFunc = func() time.Duration { return 0 } +) + +type fixture struct { + t *testing.T + + client *fake.Clientset + kubeclient *k8sfake.Clientset + namespaceLister []*v1.Namespace + pipelineLister []*devops.Pipeline + actions []core.Action + kubeactions []core.Action + + kubeobjects []runtime.Object + // Objects from here preloaded into NewSimpleFake. 
+ objects []runtime.Object + // Objects from here preloaded into devops + initDevOpsProject string + initPipeline []*devops.Pipeline + expectPipeline []*devops.Pipeline +} + +func newFixture(t *testing.T) *fixture { + f := &fixture{} + f.t = t + f.objects = []runtime.Object{} + return f +} + +func newNamespace(name string, projectName string) *v1.Namespace { + ns := &v1.Namespace{ + TypeMeta: metav1.TypeMeta{ + Kind: "Namespace", + APIVersion: v1.SchemeGroupVersion.String(), + }, + ObjectMeta: metav1.ObjectMeta{ + Name: name, + Labels: map[string]string{constants.DevOpsProjectLabelKey: projectName}, + }, + } + TRUE := true + ns.ObjectMeta.OwnerReferences = []metav1.OwnerReference{ + { + APIVersion: devops.SchemeGroupVersion.String(), + Kind: devops.ResourceKindDevOpsProject, + Name: projectName, + BlockOwnerDeletion: &TRUE, + Controller: &TRUE, + }, + } + + return ns +} + +func newPipeline(namespace, name string, spec devops.PipelineSpec, withFinalizers bool) *devops.Pipeline { + pipeline := &devops.Pipeline{ + TypeMeta: metav1.TypeMeta{ + Kind: devops.ResourceKindPipeline, + APIVersion: devops.SchemeGroupVersion.String(), + }, + ObjectMeta: metav1.ObjectMeta{ + Namespace: namespace, + Name: name, + }, + Spec: spec, + Status: devops.PipelineStatus{}, + } + if withFinalizers { + pipeline.Finalizers = append(pipeline.Finalizers, devops.PipelineFinalizerName) + } + return pipeline +} + +func newDeletingPipeline(namespace, name string) *devops.Pipeline { + now := metav1.Now() + pipeline := &devops.Pipeline{ + TypeMeta: metav1.TypeMeta{ + Kind: devops.ResourceKindPipeline, + APIVersion: devops.SchemeGroupVersion.String(), + }, + ObjectMeta: metav1.ObjectMeta{ + Namespace: namespace, + Name: name, + DeletionTimestamp: &now, + }, + } + pipeline.Finalizers = append(pipeline.Finalizers, devops.PipelineFinalizerName) + + return pipeline +} + +func (f *fixture) newController() (*Controller, informers.SharedInformerFactory, kubeinformers.SharedInformerFactory, 
*fakeDevOps.Devops) { + f.client = fake.NewSimpleClientset(f.objects...) + f.kubeclient = k8sfake.NewSimpleClientset(f.kubeobjects...) + + i := informers.NewSharedInformerFactory(f.client, noResyncPeriodFunc()) + k8sI := kubeinformers.NewSharedInformerFactory(f.kubeclient, noResyncPeriodFunc()) + dI := fakeDevOps.NewWithPipelines(f.initDevOpsProject, f.initPipeline...) + + c := NewController(f.kubeclient, f.client, dI, k8sI.Core().V1().Namespaces(), + i.Devops().V1alpha3().Pipelines()) + + c.pipelineSynced = alwaysReady + c.eventRecorder = &record.FakeRecorder{} + + for _, f := range f.pipelineLister { + i.Devops().V1alpha3().Pipelines().Informer().GetIndexer().Add(f) + } + + for _, d := range f.namespaceLister { + k8sI.Core().V1().Namespaces().Informer().GetIndexer().Add(d) + } + + return c, i, k8sI, dI +} + +func (f *fixture) run(fooName string) { + f.runController(fooName, true, false) +} + +func (f *fixture) runExpectError(fooName string) { + f.runController(fooName, true, true) +} + +func (f *fixture) runController(projectName string, startInformers bool, expectError bool) { + c, i, k8sI, dI := f.newController() + if startInformers { + stopCh := make(chan struct{}) + defer close(stopCh) + i.Start(stopCh) + k8sI.Start(stopCh) + } + + err := c.syncHandler(projectName) + if !expectError && err != nil { + f.t.Errorf("error syncing foo: %v", err) + } else if expectError && err == nil { + f.t.Error("expected error syncing foo, got nil") + } + + actions := filterInformerActions(f.client.Actions()) + for i, action := range actions { + if len(f.actions) < i+1 { + f.t.Errorf("%d unexpected actions: %+v", len(actions)-len(f.actions), actions[i:]) + break + } + + expectedAction := f.actions[i] + checkAction(expectedAction, action, f.t) + } + k8sActions := filterInformerActions(f.kubeclient.Actions()) + for i, action := range k8sActions { + if len(f.kubeactions) < i+1 { + f.t.Errorf("%d unexpected actions: %+v", len(k8sActions)-len(f.kubeactions), k8sActions[i:]) + break + 
} + + expectedAction := f.kubeactions[i] + checkAction(expectedAction, action, f.t) + } + + if len(f.kubeactions) > len(k8sActions) { + f.t.Errorf("%d additional expected actions:%+v", len(f.kubeactions)-len(k8sActions), f.kubeactions[len(k8sActions):]) + } + + if len(f.actions) > len(actions) { + f.t.Errorf("%d additional expected actions:%+v", len(f.actions)-len(actions), f.actions[len(actions):]) + } + if len(dI.Pipelines[f.initDevOpsProject]) != len(f.expectPipeline) { + f.t.Errorf(" unexpected objects: %v", dI.Pipelines) + } + for _, pipeline := range f.expectPipeline { + actualPipeline := dI.Pipelines[f.initDevOpsProject][pipeline.Name] + if !reflect.DeepEqual(actualPipeline, pipeline) { + f.t.Errorf(" pipeline %+v not match %+v", pipeline, actualPipeline) + } + + } +} + +// checkAction verifies that expected and actual actions are equal and both have +// same attached resources +func checkAction(expected, actual core.Action, t *testing.T) { + if !(expected.Matches(actual.GetVerb(), actual.GetResource().Resource) && actual.GetSubresource() == expected.GetSubresource()) { + t.Errorf("Expected\n\t%#v\ngot\n\t%#v", expected, actual) + return + } + + if reflect.TypeOf(actual) != reflect.TypeOf(expected) { + t.Errorf("Action has wrong type. Expected: %T. 
Got: %T", expected, actual) + return + } + + switch a := actual.(type) { + case core.CreateActionImpl: + e, _ := expected.(core.CreateActionImpl) + expObject := e.GetObject() + object := a.GetObject() + + if !reflect.DeepEqual(expObject, object) { + t.Errorf("Action %s %s has wrong object\nDiff:\n %s", + a.GetVerb(), a.GetResource().Resource, diff.ObjectGoPrintSideBySide(expObject, object)) + } + case core.UpdateActionImpl: + e, _ := expected.(core.UpdateActionImpl) + expObject := e.GetObject() + object := a.GetObject() + + if !reflect.DeepEqual(expObject, object) { + t.Errorf("Action %s %s has wrong object\nDiff:\n %s", + a.GetVerb(), a.GetResource().Resource, diff.ObjectGoPrintSideBySide(expObject, object)) + } + case core.PatchActionImpl: + e, _ := expected.(core.PatchActionImpl) + expPatch := e.GetPatch() + patch := a.GetPatch() + + if !reflect.DeepEqual(expPatch, patch) { + t.Errorf("Action %s %s has wrong patch\nDiff:\n %s", + a.GetVerb(), a.GetResource().Resource, diff.ObjectGoPrintSideBySide(expPatch, patch)) + } + default: + t.Errorf("Uncaptured Action %s %s, you should explicitly add a case to capture it", + actual.GetVerb(), actual.GetResource().Resource) + } +} + +// filterInformerActions filters list and watch actions for testing resources. +// Since list and watch don't change resource state we can filter it to lower +// noise level in our tests. 
+func filterInformerActions(actions []core.Action) []core.Action { + ret := []core.Action{} + for _, action := range actions { + if len(action.GetNamespace()) == 0 && + (action.Matches("list", devops.ResourcePluralPipeline) || + action.Matches("watch", devops.ResourcePluralPipeline) || + action.Matches("list", "namespaces") || + action.Matches("watch", "namespaces")) { + continue + } + ret = append(ret, action) + } + + return ret +} + +func (f *fixture) expectUpdatePipelineAction(p *devops.Pipeline) { + action := core.NewUpdateAction(schema.GroupVersionResource{ + Version: devops.SchemeGroupVersion.Version, + Resource: devops.ResourcePluralPipeline, + Group: devops.SchemeGroupVersion.Group, + }, p.Namespace, p) + f.actions = append(f.actions, action) +} + +func getKey(p *devops.Pipeline, t *testing.T) string { + key, err := cache.DeletionHandlingMetaNamespaceKeyFunc(p) + if err != nil { + t.Errorf("Unexpected error getting key for pipeline %v: %v", p.Name, err) + return "" + } + return key +} + +func TestDoNothing(t *testing.T) { + f := newFixture(t) + nsName := "test-123" + pipelineName := "test" + projectName := "test_project" + + ns := newNamespace(nsName, projectName) + pipeline := newPipeline(nsName, pipelineName, devops.PipelineSpec{}, true) + + f.pipelineLister = append(f.pipelineLister, pipeline) + f.namespaceLister = append(f.namespaceLister, ns) + f.objects = append(f.objects, pipeline) + f.initDevOpsProject = nsName + f.initPipeline = []*devops.Pipeline{pipeline} + f.expectPipeline = []*devops.Pipeline{pipeline} + + f.run(getKey(pipeline, t)) +} + +func TestAddPipelineFinalizers(t *testing.T) { + f := newFixture(t) + nsName := "test-123" + pipelineName := "test" + projectName := "test_project" + + ns := newNamespace(nsName, projectName) + pipeline := newPipeline(nsName, pipelineName, devops.PipelineSpec{}, false) + + expectPipeline := newPipeline(nsName, pipelineName, devops.PipelineSpec{}, true) + + f.pipelineLister = append(f.pipelineLister, pipeline) 
+ f.namespaceLister = append(f.namespaceLister, ns) + f.objects = append(f.objects, pipeline) + f.initDevOpsProject = nsName + f.initPipeline = []*devops.Pipeline{pipeline} + f.expectPipeline = []*devops.Pipeline{pipeline} + f.expectUpdatePipelineAction(expectPipeline) + f.run(getKey(pipeline, t)) +} + +func TestCreatePipeline(t *testing.T) { + f := newFixture(t) + nsName := "test-123" + pipelineName := "test" + projectName := "test_project" + + ns := newNamespace(nsName, projectName) + pipeline := newPipeline(nsName, pipelineName, devops.PipelineSpec{}, true) + + f.pipelineLister = append(f.pipelineLister, pipeline) + f.namespaceLister = append(f.namespaceLister, ns) + f.objects = append(f.objects, pipeline) + f.initDevOpsProject = nsName + f.expectPipeline = []*devops.Pipeline{pipeline} + f.run(getKey(pipeline, t)) +} + +func TestDeletePipeline(t *testing.T) { + f := newFixture(t) + nsName := "test-123" + pipelineName := "test" + projectName := "test_project" + + ns := newNamespace(nsName, projectName) + pipeline := newDeletingPipeline(nsName, pipelineName) + + f.pipelineLister = append(f.pipelineLister, pipeline) + f.namespaceLister = append(f.namespaceLister, ns) + f.objects = append(f.objects, pipeline) + f.initDevOpsProject = nsName + f.initPipeline = []*devops.Pipeline{pipeline} + f.expectPipeline = []*devops.Pipeline{} + f.run(getKey(pipeline, t)) +} + +func TestDeleteNotExistPipeline(t *testing.T) { + f := newFixture(t) + nsName := "test-123" + pipelineName := "test" + projectName := "test_project" + + ns := newNamespace(nsName, projectName) + pipeline := newDeletingPipeline(nsName, pipelineName) + + f.pipelineLister = append(f.pipelineLister, pipeline) + f.namespaceLister = append(f.namespaceLister, ns) + f.objects = append(f.objects, pipeline) + f.initDevOpsProject = nsName + f.initPipeline = []*devops.Pipeline{} + f.expectPipeline = []*devops.Pipeline{} + f.run(getKey(pipeline, t)) +} + +func TestUpdatePipelineConfig(t *testing.T) { + f := newFixture(t) 
+ nsName := "test-123" + pipelineName := "test" + projectName := "test_project" + + ns := newNamespace(nsName, projectName) + initPipeline := newPipeline(nsName, pipelineName, devops.PipelineSpec{}, true) + expectPipeline := newPipeline(nsName, pipelineName, devops.PipelineSpec{Type: "aa"}, true) + f.pipelineLister = append(f.pipelineLister, expectPipeline) + f.namespaceLister = append(f.namespaceLister, ns) + f.objects = append(f.objects, expectPipeline) + f.initDevOpsProject = nsName + f.initPipeline = []*devops.Pipeline{initPipeline} + f.expectPipeline = []*devops.Pipeline{expectPipeline} + f.run(getKey(expectPipeline, t)) +} diff --git a/pkg/kapis/devops/v1alpha2/handler.go b/pkg/kapis/devops/v1alpha2/handler.go index 8aa4222f6..addf7d61e 100644 --- a/pkg/kapis/devops/v1alpha2/handler.go +++ b/pkg/kapis/devops/v1alpha2/handler.go @@ -13,7 +13,6 @@ import ( type ProjectPipelineHandler struct { projectCredentialOperator devops.ProjectCredentialOperator projectMemberOperator devops.ProjectMemberOperator - projectPipelineOperator devops.ProjectPipelineOperator devopsOperator devops.DevopsOperator projectOperator devops.ProjectOperator } @@ -27,7 +26,6 @@ func NewProjectPipelineHandler(devopsClient devopsClient.Interface, dbClient *my return ProjectPipelineHandler{ projectCredentialOperator: devops.NewProjectCredentialOperator(devopsClient, dbClient), projectMemberOperator: devops.NewProjectMemberOperator(devopsClient, dbClient), - projectPipelineOperator: devops.NewProjectPipelineOperator(devopsClient), devopsOperator: devops.NewDevopsOperator(devopsClient), projectOperator: devops.NewProjectOperator(dbClient), } diff --git a/pkg/kapis/devops/v1alpha2/project_pipeline.go b/pkg/kapis/devops/v1alpha2/project_pipeline.go deleted file mode 100644 index 5173bb8d0..000000000 --- a/pkg/kapis/devops/v1alpha2/project_pipeline.go +++ /dev/null @@ -1,134 +0,0 @@ -/* -Copyright 2019 The KubeSphere Authors. 
-Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package v1alpha2 - -import ( - "github.com/emicklei/go-restful" - "k8s.io/klog" - "kubesphere.io/kubesphere/pkg/api" - "kubesphere.io/kubesphere/pkg/constants" - "kubesphere.io/kubesphere/pkg/simple/client/devops" -) - -func (h ProjectPipelineHandler) CreateDevOpsProjectPipelineHandler(request *restful.Request, resp *restful.Response) { - - projectId := request.PathParameter("devops") - username := request.HeaderParameter(constants.UserNameHeader) - var pipeline *devops.ProjectPipeline - err := request.ReadEntity(&pipeline) - if err != nil { - klog.Errorf("%+v", err) - api.HandleBadRequest(resp, nil, err) - return - } - err = h.projectOperator.CheckProjectUserInRole(username, projectId, []string{devops.ProjectOwner, devops.ProjectMaintainer}) - if err != nil { - klog.Errorf("%+v", err) - api.HandleForbidden(resp, nil, err) - return - } - pipelineName, err := h.projectPipelineOperator.CreateProjectPipeline(projectId, pipeline) - - if err != nil { - klog.Errorf("%+v", err) - api.HandleInternalError(resp, nil, err) - return - } - - resp.WriteAsJson(struct { - Name string `json:"name"` - }{Name: pipelineName}) - return -} - -func (h ProjectPipelineHandler) DeleteDevOpsProjectPipelineHandler(request *restful.Request, resp *restful.Response) { - projectId := request.PathParameter("devops") - username := request.HeaderParameter(constants.UserNameHeader) - pipelineId := request.PathParameter("pipeline") - - err := 
h.projectOperator.CheckProjectUserInRole(username, projectId, []string{devops.ProjectOwner, devops.ProjectMaintainer}) - if err != nil { - klog.Errorf("%+v", err) - api.HandleForbidden(resp, request, err) - return - } - pipelineName, err := h.projectPipelineOperator.DeleteProjectPipeline(projectId, pipelineId) - - if err != nil { - klog.Errorf("%+v", err) - api.HandleInternalError(resp, nil, err) - return - } - - resp.WriteAsJson(struct { - Name string `json:"name"` - }{Name: pipelineName}) - return -} - -func (h ProjectPipelineHandler) UpdateDevOpsProjectPipelineHandler(request *restful.Request, resp *restful.Response) { - - projectId := request.PathParameter("devops") - username := request.HeaderParameter(constants.UserNameHeader) - pipelineId := request.PathParameter("pipeline") - var pipeline *devops.ProjectPipeline - err := request.ReadEntity(&pipeline) - if err != nil { - klog.Errorf("%+v", err) - api.HandleBadRequest(resp, nil, err) - return - } - err = h.projectOperator.CheckProjectUserInRole(username, projectId, []string{devops.ProjectOwner, devops.ProjectMaintainer}) - if err != nil { - klog.Errorf("%+v", err) - api.HandleForbidden(resp, nil, err) - return - } - pipelineName, err := h.projectPipelineOperator.UpdateProjectPipeline(projectId, pipelineId, pipeline) - - if err != nil { - klog.Errorf("%+v", err) - api.HandleInternalError(resp, nil, err) - return - } - - resp.WriteAsJson(struct { - Name string `json:"name"` - }{Name: pipelineName}) - return -} - -func (h ProjectPipelineHandler) GetDevOpsProjectPipelineConfigHandler(request *restful.Request, resp *restful.Response) { - - projectId := request.PathParameter("devops") - username := request.HeaderParameter(constants.UserNameHeader) - pipelineId := request.PathParameter("pipeline") - - err := h.projectOperator.CheckProjectUserInRole(username, projectId, []string{devops.ProjectOwner, devops.ProjectMaintainer}) - if err != nil { - klog.Errorf("%+v", err) - api.HandleForbidden(resp, nil, err) - return - 
} - pipeline, err := h.projectPipelineOperator.GetProjectPipelineConfig(projectId, pipelineId) - - if err != nil { - klog.Errorf("%+v", err) - api.HandleInternalError(resp, nil, err) - return - } - - resp.WriteAsJson(pipeline) - return -} diff --git a/pkg/kapis/devops/v1alpha2/register.go b/pkg/kapis/devops/v1alpha2/register.go index 2f782517f..88806a656 100644 --- a/pkg/kapis/devops/v1alpha2/register.go +++ b/pkg/kapis/devops/v1alpha2/register.go @@ -156,40 +156,6 @@ func AddToContainer(c *restful.Container, devopsClient devops.Interface, Param(webservice.PathParameter("member", "member's username, e.g. admin")). Writes(devops.ProjectMembership{})) - webservice.Route(webservice.POST("/devops/{devops}/pipelines"). - To(projectPipelineHander.CreateDevOpsProjectPipelineHandler). - Doc("Create a DevOps project pipeline"). - Param(webservice.PathParameter("devops", "DevOps project's ID, e.g. project-RRRRAzLBlLEm")). - Metadata(restfulspec.KeyOpenAPITags, []string{constants.DevOpsPipelineTag}). - Returns(http.StatusOK, RespOK, devops.ProjectPipeline{}). - Writes(devops.ProjectPipeline{}). - Reads(devops.ProjectPipeline{})) - - webservice.Route(webservice.PUT("/devops/{devops}/pipelines/{pipeline}"). - To(projectPipelineHander.UpdateDevOpsProjectPipelineHandler). - Doc("Update the specified pipeline of the DevOps project"). - Param(webservice.PathParameter("devops", "DevOps project's ID, e.g. project-RRRRAzLBlLEm")). - Param(webservice.PathParameter("pipeline", "the name of pipeline, e.g. sample-pipeline")). - Metadata(restfulspec.KeyOpenAPITags, []string{constants.DevOpsPipelineTag}). - Writes(devops.ProjectPipeline{}). - Reads(devops.ProjectPipeline{})) - - webservice.Route(webservice.DELETE("/devops/{devops}/pipelines/{pipeline}"). - To(projectPipelineHander.DeleteDevOpsProjectPipelineHandler). - Doc("Delete the specified pipeline of the DevOps project"). - Metadata(restfulspec.KeyOpenAPITags, []string{constants.DevOpsPipelineTag}). 
- Param(webservice.PathParameter("devops", "DevOps project's ID, e.g. project-RRRRAzLBlLEm")). - Param(webservice.PathParameter("pipeline", "the name of pipeline, e.g. sample-pipeline"))) - - webservice.Route(webservice.GET("/devops/{devops}/pipelines/{pipeline}/config"). - To(projectPipelineHander.GetDevOpsProjectPipelineConfigHandler). - Doc("Get the configuration information of the specified pipeline of the DevOps Project"). - Metadata(restfulspec.KeyOpenAPITags, []string{constants.DevOpsPipelineTag}). - Param(webservice.PathParameter("devops", "DevOps project's ID, e.g. project-RRRRAzLBlLEm")). - Param(webservice.PathParameter("pipeline", "the name of pipeline, e.g. sample-pipeline")). - Returns(http.StatusOK, RespOK, devops.ProjectPipeline{}). - Writes(devops.ProjectPipeline{})) - webservice.Route(webservice.POST("/devops/{devops}/credentials"). To(projectPipelineHander.CreateDevOpsProjectCredentialHandler). Doc("Create a credential in the specified DevOps project"). diff --git a/pkg/models/devops/project_pipeline_handler.go b/pkg/models/devops/project_pipeline_handler.go deleted file mode 100644 index c263846e8..000000000 --- a/pkg/models/devops/project_pipeline_handler.go +++ /dev/null @@ -1,65 +0,0 @@ -/* -Copyright 2019 The KubeSphere Authors. -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package devops - -import ( - "fmt" - "github.com/emicklei/go-restful" - "k8s.io/klog" - "kubesphere.io/kubesphere/pkg/simple/client/devops" - "net/http" -) - -type ProjectPipelineOperator interface { - CreateProjectPipeline(projectId string, pipeline *devops.ProjectPipeline) (string, error) - DeleteProjectPipeline(projectId string, pipelineId string) (string, error) - UpdateProjectPipeline(projectId, pipelineId string, pipeline *devops.ProjectPipeline) (string, error) - GetProjectPipelineConfig(projectId, pipelineId string) (*devops.ProjectPipeline, error) -} -type projectPipelineOperator struct { - pipelineOperator devops.ProjectPipelineOperator -} - -func NewProjectPipelineOperator(devopsClient devops.ProjectPipelineOperator) ProjectPipelineOperator { - return &projectPipelineOperator{ - pipelineOperator: devopsClient, - } -} - -func (o *projectPipelineOperator) CreateProjectPipeline(projectId string, pipeline *devops.ProjectPipeline) (string, error) { - return o.pipelineOperator.CreateProjectPipeline(projectId, pipeline) -} - -func (o *projectPipelineOperator) DeleteProjectPipeline(projectId string, pipelineId string) (string, error) { - return o.pipelineOperator.DeleteProjectPipeline(projectId, pipelineId) -} - -func (o *projectPipelineOperator) UpdateProjectPipeline(projectId, pipelineId string, pipeline *devops.ProjectPipeline) (string, error) { - - switch pipeline.Type { - case devops.NoScmPipelineType: - pipeline.Pipeline.Name = pipelineId - case devops.MultiBranchPipelineType: - pipeline.MultiBranchPipeline.Name = pipelineId - default: - err := fmt.Errorf("error unsupport pipeline type") - klog.Errorf("%+v", err) - return "", restful.NewError(http.StatusBadRequest, err.Error()) - } - return o.pipelineOperator.UpdateProjectPipeline(projectId, pipeline) -} - -func (o *projectPipelineOperator) GetProjectPipelineConfig(projectId, pipelineId string) (*devops.ProjectPipeline, error) { - return o.pipelineOperator.GetProjectPipelineConfig(projectId, 
pipelineId) -} diff --git a/pkg/simple/client/devops/fake/fakedevops.go b/pkg/simple/client/devops/fake/fakedevops.go index 2fbb14df4..5ce79a297 100644 --- a/pkg/simple/client/devops/fake/fakedevops.go +++ b/pkg/simple/client/devops/fake/fakedevops.go @@ -1,7 +1,10 @@ package fake import ( + "fmt" + "github.com/emicklei/go-restful" "io/ioutil" + devopsv1alpha3 "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3" "kubesphere.io/kubesphere/pkg/simple/client/devops" "net/http" "net/url" @@ -12,30 +15,49 @@ type Devops struct { Data map[string]interface{} Projects map[string]interface{} + + Pipelines map[string]map[string]*devopsv1alpha3.Pipeline } func New(projects ...string) *Devops { d := &Devops{ - Data: nil, - Projects: map[string]interface{}{}, + Data: nil, + Projects: map[string]interface{}{}, + Pipelines: map[string]map[string]*devopsv1alpha3.Pipeline{}, } for _, p := range projects { d.Projects[p] = true } return d } +func NewWithPipelines(project string, pipelines ...*devopsv1alpha3.Pipeline) *Devops { + d := &Devops{ + Data: nil, + Projects: map[string]interface{}{}, + Pipelines: map[string]map[string]*devopsv1alpha3.Pipeline{}, + } + + d.Projects[project] = true + d.Pipelines[project] = map[string]*devopsv1alpha3.Pipeline{} + for _, f := range pipelines { + d.Pipelines[project][f.Name] = f + } + return d +} func (d *Devops) CreateDevOpsProject(projectId string) (string, error) { if _, ok := d.Projects[projectId]; ok { return projectId, nil } d.Projects[projectId] = true + d.Pipelines[projectId] = map[string]*devopsv1alpha3.Pipeline{} return projectId, nil } func (d *Devops) DeleteDevOpsProject(projectId string) error { if _, ok := d.Projects[projectId]; ok { delete(d.Projects, projectId) + delete(d.Pipelines, projectId) return nil } else { return &devops.ErrorResponse{ @@ -276,15 +298,122 @@ func (d *Devops) DeleteProjectMember(membership *devops.ProjectMembership) (*dev } // ProjectPipelineOperator -func (d *Devops) CreateProjectPipeline(projectId string, 
pipeline *devops.ProjectPipeline) (string, error) { +func (d *Devops) CreateProjectPipeline(projectId string, pipeline *devopsv1alpha3.Pipeline) (string, error) { + if _, ok := d.Pipelines[projectId][pipeline.Name]; ok { + err := fmt.Errorf("pipeline name [%s] has been used", pipeline.Name) + return "", restful.NewError(http.StatusConflict, err.Error()) + } + d.Pipelines[projectId][pipeline.Name] = pipeline return "", nil } func (d *Devops) DeleteProjectPipeline(projectId string, pipelineId string) (string, error) { + if _, ok := d.Pipelines[projectId][pipelineId]; !ok { + err := &devops.ErrorResponse{ + Body: []byte{}, + Response: &http.Response{ + Status: "404 Not Found", + StatusCode: 404, + Proto: "HTTP/1.1", + ProtoMajor: 1, + ProtoMinor: 1, + ContentLength: 50, + Header: http.Header{ + "Foo": []string{"Bar"}, + }, + Body: ioutil.NopCloser(strings.NewReader("foo")), // shouldn't be used + Request: &http.Request{ + Method: "", + URL: &url.URL{ + Scheme: "", + Opaque: "", + User: nil, + Host: "", + Path: "", + RawPath: "", + ForceQuery: false, + RawQuery: "", + Fragment: "", + }, + }, + }, + Message: "", + } + return "", err + } + delete(d.Pipelines[projectId], pipelineId) return "", nil } -func (d *Devops) UpdateProjectPipeline(projectId string, pipeline *devops.ProjectPipeline) (string, error) { +func (d *Devops) UpdateProjectPipeline(projectId string, pipeline *devopsv1alpha3.Pipeline) (string, error) { + if _, ok := d.Pipelines[projectId][pipeline.Name]; !ok { + err := &devops.ErrorResponse{ + Body: []byte{}, + Response: &http.Response{ + Status: "404 Not Found", + StatusCode: 404, + Proto: "HTTP/1.1", + ProtoMajor: 1, + ProtoMinor: 1, + ContentLength: 50, + Header: http.Header{ + "Foo": []string{"Bar"}, + }, + Body: ioutil.NopCloser(strings.NewReader("foo")), // shouldn't be used + Request: &http.Request{ + Method: "", + URL: &url.URL{ + Scheme: "", + Opaque: "", + User: nil, + Host: "", + Path: "", + RawPath: "", + ForceQuery: false, + RawQuery: "", + 
Fragment: "", + }, + }, + }, + Message: "", + } + return "", err + } + d.Pipelines[projectId][pipeline.Name] = pipeline return "", nil } -func (d *Devops) GetProjectPipelineConfig(projectId, pipelineId string) (*devops.ProjectPipeline, error) { - return nil, nil +func (d *Devops) GetProjectPipelineConfig(projectId, pipelineId string) (*devopsv1alpha3.Pipeline, error) { + if _, ok := d.Pipelines[projectId][pipelineId]; !ok { + err := &devops.ErrorResponse{ + Body: []byte{}, + Response: &http.Response{ + Status: "404 Not Found", + StatusCode: 404, + Proto: "HTTP/1.1", + ProtoMajor: 1, + ProtoMinor: 1, + ContentLength: 50, + Header: http.Header{ + "Foo": []string{"Bar"}, + }, + Body: ioutil.NopCloser(strings.NewReader("foo")), // shouldn't be used + Request: &http.Request{ + Method: "", + URL: &url.URL{ + Scheme: "", + Opaque: "", + User: nil, + Host: "", + Path: "", + RawPath: "", + ForceQuery: false, + RawQuery: "", + Fragment: "", + }, + }, + }, + Message: "", + } + return nil, err + } + + return d.Pipelines[projectId][pipelineId], nil } diff --git a/pkg/simple/client/devops/jenkins/constants.go b/pkg/simple/client/devops/jenkins/constants.go index eee426b08..7c8c2a8aa 100644 --- a/pkg/simple/client/devops/jenkins/constants.go +++ b/pkg/simple/client/devops/jenkins/constants.go @@ -18,3 +18,12 @@ const ( GLOBAL_ROLE = "globalRoles" PROJECT_ROLE = "projectRoles" ) + +var ParameterTypeMap = map[string]string{ + "hudson.model.StringParameterDefinition": "string", + "hudson.model.ChoiceParameterDefinition": "choice", + "hudson.model.TextParameterDefinition": "text", + "hudson.model.BooleanParameterDefinition": "boolean", + "hudson.model.FileParameterDefinition": "file", + "hudson.model.PasswordParameterDefinition": "password", +} diff --git a/pkg/simple/client/devops/jenkins/pipeline_internal.go b/pkg/simple/client/devops/jenkins/pipeline_internal.go index cb84f4fc1..e93cb46e0 100644 --- a/pkg/simple/client/devops/jenkins/pipeline_internal.go +++ 
b/pkg/simple/client/devops/jenkins/pipeline_internal.go @@ -3,7 +3,7 @@ package jenkins import ( "fmt" "github.com/beevik/etree" - "kubesphere.io/kubesphere/pkg/simple/client/devops" + devopsv1alpha3 "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3" "strconv" "strings" "time" @@ -16,7 +16,7 @@ func replaceXmlVersion(config, oldVersion, targetVersion string) string { return output } -func createPipelineConfigXml(pipeline *devops.NoScmPipeline) (string, error) { +func createPipelineConfigXml(pipeline *devopsv1alpha3.NoScmPipeline) (string, error) { doc := etree.NewDocument() xmlString := ` @@ -82,8 +82,8 @@ func createPipelineConfigXml(pipeline *devops.NoScmPipeline) (string, error) { return replaceXmlVersion(stringXml, "1.0", "1.1"), err } -func parsePipelineConfigXml(config string) (*devops.NoScmPipeline, error) { - pipeline := &devops.NoScmPipeline{} +func parsePipelineConfigXml(config string) (*devopsv1alpha3.NoScmPipeline, error) { + pipeline := &devopsv1alpha3.NoScmPipeline{} config = replaceXmlVersion(config, "1.1", "1.0") doc := etree.NewDocument() err := doc.ReadFromString(config) @@ -106,14 +106,14 @@ func parsePipelineConfigXml(config string) (*devops.NoScmPipeline, error) { strategy := properties. SelectElement("jenkins.model.BuildDiscarderProperty"). 
SelectElement("strategy") - pipeline.Discarder = &devops.DiscarderProperty{ + pipeline.Discarder = &devopsv1alpha3.DiscarderProperty{ DaysToKeep: strategy.SelectElement("daysToKeep").Text(), NumToKeep: strategy.SelectElement("numToKeep").Text(), } } - pipeline.Parameters = &devops.Parameters{} + pipeline.Parameters = getParametersfromEtree(properties) - if len(*pipeline.Parameters) == 0 { + if len(pipeline.Parameters) == 0 { pipeline.Parameters = nil } @@ -122,13 +122,13 @@ func parsePipelineConfigXml(config string) (*devops.NoScmPipeline, error) { "org.jenkinsci.plugins.workflow.job.properties.PipelineTriggersJobProperty"); triggerProperty != nil { triggers := triggerProperty.SelectElement("triggers") if timerTrigger := triggers.SelectElement("hudson.triggers.TimerTrigger"); timerTrigger != nil { - pipeline.TimerTrigger = &devops.TimerTrigger{ + pipeline.TimerTrigger = &devopsv1alpha3.TimerTrigger{ Cron: timerTrigger.SelectElement("spec").Text(), } } } if authToken := flow.SelectElement("authToken"); authToken != nil { - pipeline.RemoteTrigger = &devops.RemoteTrigger{ + pipeline.RemoteTrigger = &devopsv1alpha3.RemoteTrigger{ Token: authToken.Text(), } } @@ -140,11 +140,11 @@ func parsePipelineConfigXml(config string) (*devops.NoScmPipeline, error) { return pipeline, nil } -func appendParametersToEtree(properties *etree.Element, parameters *devops.Parameters) { +func appendParametersToEtree(properties *etree.Element, parameters []devopsv1alpha3.Parameter) { parameterDefinitions := properties.CreateElement("hudson.model.ParametersDefinitionProperty"). 
CreateElement("parameterDefinitions") - for _, parameter := range *parameters { - for className, typeName := range devops.ParameterTypeMap { + for _, parameter := range parameters { + for className, typeName := range ParameterTypeMap { if typeName == parameter.Type { paramDefine := parameterDefinitions.CreateElement(className) paramDefine.CreateElement("name").SetText(parameter.Name) @@ -169,51 +169,51 @@ func appendParametersToEtree(properties *etree.Element, parameters *devops.Param } } -func getParametersfromEtree(properties *etree.Element) *devops.Parameters { - var parameters devops.Parameters +func getParametersfromEtree(properties *etree.Element) []devopsv1alpha3.Parameter { + var parameters []devopsv1alpha3.Parameter if parametersProperty := properties.SelectElement("hudson.model.ParametersDefinitionProperty"); parametersProperty != nil { params := parametersProperty.SelectElement("parameterDefinitions").ChildElements() for _, param := range params { switch param.Tag { case "hudson.model.StringParameterDefinition": - parameters = append(parameters, &devops.Parameter{ + parameters = append(parameters, devopsv1alpha3.Parameter{ Name: param.SelectElement("name").Text(), Description: param.SelectElement("description").Text(), DefaultValue: param.SelectElement("defaultValue").Text(), - Type: devops.ParameterTypeMap["hudson.model.StringParameterDefinition"], + Type: ParameterTypeMap["hudson.model.StringParameterDefinition"], }) case "hudson.model.BooleanParameterDefinition": - parameters = append(parameters, &devops.Parameter{ + parameters = append(parameters, devopsv1alpha3.Parameter{ Name: param.SelectElement("name").Text(), Description: param.SelectElement("description").Text(), DefaultValue: param.SelectElement("defaultValue").Text(), - Type: devops.ParameterTypeMap["hudson.model.BooleanParameterDefinition"], + Type: ParameterTypeMap["hudson.model.BooleanParameterDefinition"], }) case "hudson.model.TextParameterDefinition": - parameters = append(parameters, 
&devops.Parameter{ + parameters = append(parameters, devopsv1alpha3.Parameter{ Name: param.SelectElement("name").Text(), Description: param.SelectElement("description").Text(), DefaultValue: param.SelectElement("defaultValue").Text(), - Type: devops.ParameterTypeMap["hudson.model.TextParameterDefinition"], + Type: ParameterTypeMap["hudson.model.TextParameterDefinition"], }) case "hudson.model.FileParameterDefinition": - parameters = append(parameters, &devops.Parameter{ + parameters = append(parameters, devopsv1alpha3.Parameter{ Name: param.SelectElement("name").Text(), Description: param.SelectElement("description").Text(), - Type: devops.ParameterTypeMap["hudson.model.FileParameterDefinition"], + Type: ParameterTypeMap["hudson.model.FileParameterDefinition"], }) case "hudson.model.PasswordParameterDefinition": - parameters = append(parameters, &devops.Parameter{ + parameters = append(parameters, devopsv1alpha3.Parameter{ Name: param.SelectElement("name").Text(), Description: param.SelectElement("description").Text(), DefaultValue: param.SelectElement("name").Text(), - Type: devops.ParameterTypeMap["hudson.model.PasswordParameterDefinition"], + Type: ParameterTypeMap["hudson.model.PasswordParameterDefinition"], }) case "hudson.model.ChoiceParameterDefinition": - choiceParameter := &devops.Parameter{ + choiceParameter := devopsv1alpha3.Parameter{ Name: param.SelectElement("name").Text(), Description: param.SelectElement("description").Text(), - Type: devops.ParameterTypeMap["hudson.model.ChoiceParameterDefinition"], + Type: ParameterTypeMap["hudson.model.ChoiceParameterDefinition"], } choices := param.SelectElement("choices").SelectElement("a").SelectElements("string") for _, choice := range choices { @@ -222,7 +222,7 @@ func getParametersfromEtree(properties *etree.Element) *devops.Parameters { choiceParameter.DefaultValue = strings.TrimSpace(choiceParameter.DefaultValue) parameters = append(parameters, choiceParameter) default: - parameters = append(parameters, 
&devops.Parameter{ + parameters = append(parameters, devopsv1alpha3.Parameter{ Name: param.SelectElement("name").Text(), Description: param.SelectElement("description").Text(), DefaultValue: "unknown", @@ -231,10 +231,10 @@ func getParametersfromEtree(properties *etree.Element) *devops.Parameters { } } } - return ¶meters + return parameters } -func appendGitSourceToEtree(source *etree.Element, gitSource *devops.GitSource) { +func appendGitSourceToEtree(source *etree.Element, gitSource *devopsv1alpha3.GitSource) { source.CreateAttr("class", "jenkins.plugins.git.GitSCMSource") source.CreateAttr("plugin", "git") source.CreateElement("id").SetText(gitSource.ScmId) @@ -274,8 +274,8 @@ func appendGitSourceToEtree(source *etree.Element, gitSource *devops.GitSource) return } -func getGitSourcefromEtree(source *etree.Element) *devops.GitSource { - var gitSource devops.GitSource +func getGitSourcefromEtree(source *etree.Element) *devopsv1alpha3.GitSource { + var gitSource devopsv1alpha3.GitSource if credential := source.SelectElement("credentialsId"); credential != nil { gitSource.CredentialId = credential.Text() } @@ -292,7 +292,7 @@ func getGitSourcefromEtree(source *etree.Element) *devops.GitSource { "jenkins.plugins.git.traits.CloneOptionTrait"); cloneTrait != nil { if cloneExtension := cloneTrait.SelectElement( "extension"); cloneExtension != nil { - gitSource.CloneOption = &devops.GitCloneOption{} + gitSource.CloneOption = &devopsv1alpha3.GitCloneOption{} if value, err := strconv.ParseBool(cloneExtension.SelectElement("shallow").Text()); err == nil { gitSource.CloneOption.Shallow = value } @@ -313,8 +313,8 @@ func getGitSourcefromEtree(source *etree.Element) *devops.GitSource { return &gitSource } -func getGithubSourcefromEtree(source *etree.Element) *devops.GithubSource { - var githubSource devops.GithubSource +func getGithubSourcefromEtree(source *etree.Element) *devopsv1alpha3.GithubSource { + var githubSource devopsv1alpha3.GithubSource if credential := 
source.SelectElement("credentialsId"); credential != nil { githubSource.CredentialId = credential.Text() } @@ -345,22 +345,22 @@ func getGithubSourcefromEtree(source *etree.Element) *devops.GithubSource { trust := strings.Split(trustClass, "$") switch trust[1] { case "TrustContributors": - githubSource.DiscoverPRFromForks = &devops.DiscoverPRFromForks{ + githubSource.DiscoverPRFromForks = &devopsv1alpha3.DiscoverPRFromForks{ Strategy: strategyId, Trust: 1, } case "TrustEveryone": - githubSource.DiscoverPRFromForks = &devops.DiscoverPRFromForks{ + githubSource.DiscoverPRFromForks = &devopsv1alpha3.DiscoverPRFromForks{ Strategy: strategyId, Trust: 2, } case "TrustPermission": - githubSource.DiscoverPRFromForks = &devops.DiscoverPRFromForks{ + githubSource.DiscoverPRFromForks = &devopsv1alpha3.DiscoverPRFromForks{ Strategy: strategyId, Trust: 3, } case "TrustNobody": - githubSource.DiscoverPRFromForks = &devops.DiscoverPRFromForks{ + githubSource.DiscoverPRFromForks = &devopsv1alpha3.DiscoverPRFromForks{ Strategy: strategyId, Trust: 4, } @@ -369,7 +369,7 @@ func getGithubSourcefromEtree(source *etree.Element) *devops.GithubSource { "jenkins.plugins.git.traits.CloneOptionTrait"); cloneTrait != nil { if cloneExtension := cloneTrait.SelectElement( "extension"); cloneExtension != nil { - githubSource.CloneOption = &devops.GitCloneOption{} + githubSource.CloneOption = &devopsv1alpha3.GitCloneOption{} if value, err := strconv.ParseBool(cloneExtension.SelectElement("shallow").Text()); err == nil { githubSource.CloneOption.Shallow = value } @@ -392,7 +392,7 @@ func getGithubSourcefromEtree(source *etree.Element) *devops.GithubSource { return &githubSource } -func appendGithubSourceToEtree(source *etree.Element, githubSource *devops.GithubSource) { +func appendGithubSourceToEtree(source *etree.Element, githubSource *devopsv1alpha3.GithubSource) { source.CreateAttr("class", "org.jenkinsci.plugins.github_branch_source.GitHubSCMSource") source.CreateAttr("plugin", 
"github-branch-source") source.CreateElement("id").SetText(githubSource.ScmId) @@ -455,8 +455,8 @@ func appendGithubSourceToEtree(source *etree.Element, githubSource *devops.Githu return } -func getBitbucketServerSourceFromEtree(source *etree.Element) *devops.BitbucketServerSource { - var s devops.BitbucketServerSource +func getBitbucketServerSourceFromEtree(source *etree.Element) *devopsv1alpha3.BitbucketServerSource { + var s devopsv1alpha3.BitbucketServerSource if credential := source.SelectElement("credentialsId"); credential != nil { s.CredentialId = credential.Text() } @@ -487,17 +487,17 @@ func getBitbucketServerSourceFromEtree(source *etree.Element) *devops.BitbucketS trust := strings.Split(trustClass, "$") switch trust[1] { case "TrustEveryone": - s.DiscoverPRFromForks = &devops.DiscoverPRFromForks{ + s.DiscoverPRFromForks = &devopsv1alpha3.DiscoverPRFromForks{ Strategy: strategyId, Trust: 1, } case "TrustTeamForks": - s.DiscoverPRFromForks = &devops.DiscoverPRFromForks{ + s.DiscoverPRFromForks = &devopsv1alpha3.DiscoverPRFromForks{ Strategy: strategyId, Trust: 2, } case "TrustNobody": - s.DiscoverPRFromForks = &devops.DiscoverPRFromForks{ + s.DiscoverPRFromForks = &devopsv1alpha3.DiscoverPRFromForks{ Strategy: strategyId, Trust: 3, } @@ -506,7 +506,7 @@ func getBitbucketServerSourceFromEtree(source *etree.Element) *devops.BitbucketS "jenkins.plugins.git.traits.CloneOptionTrait"); cloneTrait != nil { if cloneExtension := cloneTrait.SelectElement( "extension"); cloneExtension != nil { - s.CloneOption = &devops.GitCloneOption{} + s.CloneOption = &devopsv1alpha3.GitCloneOption{} if value, err := strconv.ParseBool(cloneExtension.SelectElement("shallow").Text()); err == nil { s.CloneOption.Shallow = value } @@ -529,7 +529,7 @@ func getBitbucketServerSourceFromEtree(source *etree.Element) *devops.BitbucketS return &s } -func appendBitbucketServerSourceToEtree(source *etree.Element, s *devops.BitbucketServerSource) { +func 
appendBitbucketServerSourceToEtree(source *etree.Element, s *devopsv1alpha3.BitbucketServerSource) { source.CreateAttr("class", "com.cloudbees.jenkins.plugins.bitbucket.BitbucketSCMSource") source.CreateAttr("plugin", "cloudbees-bitbucket-branch-source") source.CreateElement("id").SetText(s.ScmId) @@ -590,8 +590,8 @@ func appendBitbucketServerSourceToEtree(source *etree.Element, s *devops.Bitbuck return } -func getSvnSourcefromEtree(source *etree.Element) *devops.SvnSource { - var s devops.SvnSource +func getSvnSourcefromEtree(source *etree.Element) *devopsv1alpha3.SvnSource { + var s devopsv1alpha3.SvnSource if remote := source.SelectElement("remoteBase"); remote != nil { s.Remote = remote.Text() } @@ -610,7 +610,7 @@ func getSvnSourcefromEtree(source *etree.Element) *devops.SvnSource { return &s } -func appendSvnSourceToEtree(source *etree.Element, s *devops.SvnSource) { +func appendSvnSourceToEtree(source *etree.Element, s *devopsv1alpha3.SvnSource) { source.CreateAttr("class", "jenkins.scm.impl.subversion.SubversionSCMSource") source.CreateAttr("plugin", "subversion") source.CreateElement("id").SetText(s.ScmId) @@ -629,8 +629,8 @@ func appendSvnSourceToEtree(source *etree.Element, s *devops.SvnSource) { return } -func getSingleSvnSourceFromEtree(source *etree.Element) *devops.SingleSvnSource { - var s devops.SingleSvnSource +func getSingleSvnSourceFromEtree(source *etree.Element) *devopsv1alpha3.SingleSvnSource { + var s devopsv1alpha3.SingleSvnSource if scm := source.SelectElement("scm"); scm != nil { if locations := scm.SelectElement("locations"); locations != nil { if moduleLocations := locations.SelectElement("hudson.scm.SubversionSCM_-ModuleLocation"); moduleLocations != nil { @@ -646,7 +646,7 @@ func getSingleSvnSourceFromEtree(source *etree.Element) *devops.SingleSvnSource return &s } -func appendSingleSvnSourceToEtree(source *etree.Element, s *devops.SingleSvnSource) { +func appendSingleSvnSourceToEtree(source *etree.Element, s 
*devopsv1alpha3.SingleSvnSource) { source.CreateAttr("class", "jenkins.scm.impl.SingleSCMSource") source.CreateAttr("plugin", "scm-api") @@ -682,7 +682,7 @@ func appendSingleSvnSourceToEtree(source *etree.Element, s *devops.SingleSvnSour return } -func appendMultiBranchJobTriggerToEtree(properties *etree.Element, s *devops.MultiBranchJobTrigger) { +func appendMultiBranchJobTriggerToEtree(properties *etree.Element, s *devopsv1alpha3.MultiBranchJobTrigger) { triggerProperty := properties.CreateElement("org.jenkinsci.plugins.workflow.multibranch.PipelineTriggerProperty") triggerProperty.CreateAttr("plugin", "multibranch-action-triggers") triggerProperty.CreateElement("createActionJobsToTrigger").SetText(s.CreateActionJobsToTrigger) @@ -690,8 +690,8 @@ func appendMultiBranchJobTriggerToEtree(properties *etree.Element, s *devops.Mul return } -func getMultiBranchJobTriggerfromEtree(properties *etree.Element) *devops.MultiBranchJobTrigger { - var s devops.MultiBranchJobTrigger +func getMultiBranchJobTriggerfromEtree(properties *etree.Element) *devopsv1alpha3.MultiBranchJobTrigger { + var s devopsv1alpha3.MultiBranchJobTrigger triggerProperty := properties.SelectElement("org.jenkinsci.plugins.workflow.multibranch.PipelineTriggerProperty") if triggerProperty != nil { s.CreateActionJobsToTrigger = triggerProperty.SelectElement("createActionJobsToTrigger").Text() @@ -699,7 +699,7 @@ func getMultiBranchJobTriggerfromEtree(properties *etree.Element) *devops.MultiB } return &s } -func createMultiBranchPipelineConfigXml(projectName string, pipeline *devops.MultiBranchPipeline) (string, error) { +func createMultiBranchPipelineConfigXml(projectName string, pipeline *devopsv1alpha3.MultiBranchPipeline) (string, error) { doc := etree.NewDocument() xmlString := ` @@ -802,8 +802,8 @@ func createMultiBranchPipelineConfigXml(projectName string, pipeline *devops.Mul return replaceXmlVersion(stringXml, "1.0", "1.1"), err } -func parseMultiBranchPipelineConfigXml(config string) 
(*devops.MultiBranchPipeline, error) { - pipeline := &devops.MultiBranchPipeline{} +func parseMultiBranchPipelineConfigXml(config string) (*devopsv1alpha3.MultiBranchPipeline, error) { + pipeline := &devopsv1alpha3.MultiBranchPipeline{} config = replaceXmlVersion(config, "1.1", "1.0") doc := etree.NewDocument() err := doc.ReadFromString(config) @@ -823,7 +823,7 @@ func parseMultiBranchPipelineConfigXml(config string) (*devops.MultiBranchPipeli pipeline.Description = project.SelectElement("description").Text() if discarder := project.SelectElement("orphanedItemStrategy"); discarder != nil { - pipeline.Discarder = &devops.DiscarderProperty{ + pipeline.Discarder = &devopsv1alpha3.DiscarderProperty{ DaysToKeep: discarder.SelectElement("daysToKeep").Text(), NumToKeep: discarder.SelectElement("numToKeep").Text(), } @@ -831,7 +831,7 @@ func parseMultiBranchPipelineConfigXml(config string) (*devops.MultiBranchPipeli if triggers := project.SelectElement("triggers"); triggers != nil { if timerTrigger := triggers.SelectElement( "com.cloudbees.hudson.plugins.folder.computed.PeriodicFolderTrigger"); timerTrigger != nil { - pipeline.TimerTrigger = &devops.TimerTrigger{ + pipeline.TimerTrigger = &devopsv1alpha3.TimerTrigger{ Interval: timerTrigger.SelectElement("interval").Text(), } } diff --git a/pkg/simple/client/devops/jenkins/pipeline_internal_test.go b/pkg/simple/client/devops/jenkins/pipeline_internal_test.go index fe5c3382f..7bf233797 100644 --- a/pkg/simple/client/devops/jenkins/pipeline_internal_test.go +++ b/pkg/simple/client/devops/jenkins/pipeline_internal_test.go @@ -1,13 +1,13 @@ package jenkins import ( - "kubesphere.io/kubesphere/pkg/simple/client/devops" + devopsv1alpha3 "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3" "reflect" "testing" ) func Test_NoScmPipelineConfig(t *testing.T) { - inputs := []*devops.NoScmPipeline{ + inputs := []*devopsv1alpha3.NoScmPipeline{ { Name: "", Description: "for test", @@ -42,12 +42,12 @@ func Test_NoScmPipelineConfig(t 
*testing.T) { } func Test_NoScmPipelineConfig_Discarder(t *testing.T) { - inputs := []*devops.NoScmPipeline{ + inputs := []*devopsv1alpha3.NoScmPipeline{ { Name: "", Description: "for test", Jenkinsfile: "node{echo 'hello'}", - Discarder: &devops.DiscarderProperty{ + Discarder: &devopsv1alpha3.DiscarderProperty{ DaysToKeep: "3", NumToKeep: "5", }, }, @@ -55,7 +55,7 @@ func Test_NoScmPipelineConfig_Discarder(t *testing.T) { Name: "", Description: "for test", Jenkinsfile: "node{echo 'hello'}", - Discarder: &devops.DiscarderProperty{ + Discarder: &devopsv1alpha3.DiscarderProperty{ DaysToKeep: "3", NumToKeep: "", }, }, @@ -63,7 +63,7 @@ func Test_NoScmPipelineConfig_Discarder(t *testing.T) { Name: "", Description: "for test", Jenkinsfile: "node{echo 'hello'}", - Discarder: &devops.DiscarderProperty{ + Discarder: &devopsv1alpha3.DiscarderProperty{ DaysToKeep: "", NumToKeep: "21321", }, }, @@ -71,7 +71,7 @@ func Test_NoScmPipelineConfig_Discarder(t *testing.T) { Name: "", Description: "for test", Jenkinsfile: "node{echo 'hello'}", - Discarder: &devops.DiscarderProperty{ + Discarder: &devopsv1alpha3.DiscarderProperty{ DaysToKeep: "", NumToKeep: "", }, }, @@ -93,13 +93,13 @@ func Test_NoScmPipelineConfig_Discarder(t *testing.T) { } func Test_NoScmPipelineConfig_Param(t *testing.T) { - inputs := []*devops.NoScmPipeline{ + inputs := []*devopsv1alpha3.NoScmPipeline{ { Name: "", Description: "for test", Jenkinsfile: "node{echo 'hello'}", - Parameters: &devops.Parameters{ - &devops.Parameter{ + Parameters: []devopsv1alpha3.Parameter{ + { Name: "d", DefaultValue: "a\nb", Type: "choice", @@ -111,26 +111,26 @@ func Test_NoScmPipelineConfig_Param(t *testing.T) { Name: "", Description: "for test", Jenkinsfile: "node{echo 'hello'}", - Parameters: &devops.Parameters{ - &devops.Parameter{ + Parameters: []devopsv1alpha3.Parameter{ + { Name: "a", DefaultValue: "abc", Type: "string", Description: "fortest", }, - &devops.Parameter{ + { Name: "b", DefaultValue: "false", Type: "boolean", 
Description: "fortest", }, - &devops.Parameter{ + { Name: "c", DefaultValue: "password \n aaa", Type: "text", Description: "fortest", }, - &devops.Parameter{ + { Name: "d", DefaultValue: "a\nb", Type: "choice", @@ -156,12 +156,12 @@ func Test_NoScmPipelineConfig_Param(t *testing.T) { } func Test_NoScmPipelineConfig_Trigger(t *testing.T) { - inputs := []*devops.NoScmPipeline{ + inputs := []*devopsv1alpha3.NoScmPipeline{ { Name: "", Description: "for test", Jenkinsfile: "node{echo 'hello'}", - TimerTrigger: &devops.TimerTrigger{ + TimerTrigger: &devopsv1alpha3.TimerTrigger{ Cron: "1 1 1 * * *", }, }, @@ -170,7 +170,7 @@ func Test_NoScmPipelineConfig_Trigger(t *testing.T) { Name: "", Description: "for test", Jenkinsfile: "node{echo 'hello'}", - RemoteTrigger: &devops.RemoteTrigger{ + RemoteTrigger: &devopsv1alpha3.RemoteTrigger{ Token: "abc", }, }, @@ -178,10 +178,10 @@ func Test_NoScmPipelineConfig_Trigger(t *testing.T) { Name: "", Description: "for test", Jenkinsfile: "node{echo 'hello'}", - TimerTrigger: &devops.TimerTrigger{ + TimerTrigger: &devopsv1alpha3.TimerTrigger{ Cron: "1 1 1 * * *", }, - RemoteTrigger: &devops.RemoteTrigger{ + RemoteTrigger: &devopsv1alpha3.RemoteTrigger{ Token: "abc", }, }, @@ -205,34 +205,34 @@ func Test_NoScmPipelineConfig_Trigger(t *testing.T) { func Test_MultiBranchPipelineConfig(t *testing.T) { - inputs := []*devops.MultiBranchPipeline{ + inputs := []*devopsv1alpha3.MultiBranchPipeline{ { Name: "", Description: "for test", ScriptPath: "Jenkinsfile", SourceType: "git", - GitSource: &devops.GitSource{}, + GitSource: &devopsv1alpha3.GitSource{}, }, { Name: "", Description: "for test", ScriptPath: "Jenkinsfile", SourceType: "github", - GitHubSource: &devops.GithubSource{}, + GitHubSource: &devopsv1alpha3.GithubSource{}, }, { Name: "", Description: "for test", ScriptPath: "Jenkinsfile", SourceType: "single_svn", - SingleSvnSource: &devops.SingleSvnSource{}, + SingleSvnSource: &devopsv1alpha3.SingleSvnSource{}, }, { Name: "", Description: 
"for test", ScriptPath: "Jenkinsfile", SourceType: "svn", - SvnSource: &devops.SvnSource{}, + SvnSource: &devopsv1alpha3.SvnSource{}, }, } for _, input := range inputs { @@ -253,17 +253,17 @@ func Test_MultiBranchPipelineConfig(t *testing.T) { func Test_MultiBranchPipelineConfig_Discarder(t *testing.T) { - inputs := []*devops.MultiBranchPipeline{ + inputs := []*devopsv1alpha3.MultiBranchPipeline{ { Name: "", Description: "for test", ScriptPath: "Jenkinsfile", SourceType: "git", - Discarder: &devops.DiscarderProperty{ + Discarder: &devopsv1alpha3.DiscarderProperty{ DaysToKeep: "1", NumToKeep: "2", }, - GitSource: &devops.GitSource{}, + GitSource: &devopsv1alpha3.GitSource{}, }, } for _, input := range inputs { @@ -283,16 +283,16 @@ func Test_MultiBranchPipelineConfig_Discarder(t *testing.T) { } func Test_MultiBranchPipelineConfig_TimerTrigger(t *testing.T) { - inputs := []*devops.MultiBranchPipeline{ + inputs := []*devopsv1alpha3.MultiBranchPipeline{ { Name: "", Description: "for test", ScriptPath: "Jenkinsfile", SourceType: "git", - TimerTrigger: &devops.TimerTrigger{ + TimerTrigger: &devopsv1alpha3.TimerTrigger{ Interval: "12345566", }, - GitSource: &devops.GitSource{}, + GitSource: &devopsv1alpha3.GitSource{}, }, } for _, input := range inputs { @@ -313,16 +313,16 @@ func Test_MultiBranchPipelineConfig_TimerTrigger(t *testing.T) { func Test_MultiBranchPipelineConfig_Source(t *testing.T) { - inputs := []*devops.MultiBranchPipeline{ + inputs := []*devopsv1alpha3.MultiBranchPipeline{ { Name: "", Description: "for test", ScriptPath: "Jenkinsfile", SourceType: "git", - TimerTrigger: &devops.TimerTrigger{ + TimerTrigger: &devopsv1alpha3.TimerTrigger{ Interval: "12345566", }, - GitSource: &devops.GitSource{ + GitSource: &devopsv1alpha3.GitSource{ Url: "https://github.com/kubesphere/devops", CredentialId: "git", DiscoverBranches: true, @@ -333,17 +333,17 @@ func Test_MultiBranchPipelineConfig_Source(t *testing.T) { Description: "for test", ScriptPath: "Jenkinsfile", 
SourceType: "github", - TimerTrigger: &devops.TimerTrigger{ + TimerTrigger: &devopsv1alpha3.TimerTrigger{ Interval: "12345566", }, - GitHubSource: &devops.GithubSource{ + GitHubSource: &devopsv1alpha3.GithubSource{ Owner: "kubesphere", Repo: "devops", CredentialId: "github", ApiUri: "https://api.github.com", DiscoverBranches: 1, DiscoverPRFromOrigin: 2, - DiscoverPRFromForks: &devops.DiscoverPRFromForks{ + DiscoverPRFromForks: &devopsv1alpha3.DiscoverPRFromForks{ Strategy: 1, Trust: 1, }, @@ -354,17 +354,17 @@ func Test_MultiBranchPipelineConfig_Source(t *testing.T) { Description: "for test", ScriptPath: "Jenkinsfile", SourceType: "bitbucket_server", - TimerTrigger: &devops.TimerTrigger{ + TimerTrigger: &devopsv1alpha3.TimerTrigger{ Interval: "12345566", }, - BitbucketServerSource: &devops.BitbucketServerSource{ + BitbucketServerSource: &devopsv1alpha3.BitbucketServerSource{ Owner: "kubesphere", Repo: "devops", CredentialId: "github", ApiUri: "https://api.github.com", DiscoverBranches: 1, DiscoverPRFromOrigin: 2, - DiscoverPRFromForks: &devops.DiscoverPRFromForks{ + DiscoverPRFromForks: &devopsv1alpha3.DiscoverPRFromForks{ Strategy: 1, Trust: 1, }, @@ -376,10 +376,10 @@ func Test_MultiBranchPipelineConfig_Source(t *testing.T) { Description: "for test", ScriptPath: "Jenkinsfile", SourceType: "svn", - TimerTrigger: &devops.TimerTrigger{ + TimerTrigger: &devopsv1alpha3.TimerTrigger{ Interval: "12345566", }, - SvnSource: &devops.SvnSource{ + SvnSource: &devopsv1alpha3.SvnSource{ Remote: "https://api.svn.com/bcd", CredentialId: "svn", Excludes: "truck", @@ -391,10 +391,10 @@ func Test_MultiBranchPipelineConfig_Source(t *testing.T) { Description: "for test", ScriptPath: "Jenkinsfile", SourceType: "single_svn", - TimerTrigger: &devops.TimerTrigger{ + TimerTrigger: &devopsv1alpha3.TimerTrigger{ Interval: "12345566", }, - SingleSvnSource: &devops.SingleSvnSource{ + SingleSvnSource: &devopsv1alpha3.SingleSvnSource{ Remote: "https://api.svn.com/bcd", CredentialId: "svn", }, 
@@ -419,17 +419,17 @@ func Test_MultiBranchPipelineConfig_Source(t *testing.T) { func Test_MultiBranchPipelineCloneConfig(t *testing.T) { - inputs := []*devops.MultiBranchPipeline{ + inputs := []*devopsv1alpha3.MultiBranchPipeline{ { Name: "", Description: "for test", ScriptPath: "Jenkinsfile", SourceType: "git", - GitSource: &devops.GitSource{ + GitSource: &devopsv1alpha3.GitSource{ Url: "https://github.com/kubesphere/devops", CredentialId: "git", DiscoverBranches: true, - CloneOption: &devops.GitCloneOption{ + CloneOption: &devopsv1alpha3.GitCloneOption{ Shallow: false, Depth: 3, Timeout: 20, @@ -441,18 +441,18 @@ func Test_MultiBranchPipelineCloneConfig(t *testing.T) { Description: "for test", ScriptPath: "Jenkinsfile", SourceType: "github", - GitHubSource: &devops.GithubSource{ + GitHubSource: &devopsv1alpha3.GithubSource{ Owner: "kubesphere", Repo: "devops", CredentialId: "github", ApiUri: "https://api.github.com", DiscoverBranches: 1, DiscoverPRFromOrigin: 2, - DiscoverPRFromForks: &devops.DiscoverPRFromForks{ + DiscoverPRFromForks: &devopsv1alpha3.DiscoverPRFromForks{ Strategy: 1, Trust: 1, }, - CloneOption: &devops.GitCloneOption{ + CloneOption: &devopsv1alpha3.GitCloneOption{ Shallow: false, Depth: 3, Timeout: 20, @@ -480,13 +480,13 @@ func Test_MultiBranchPipelineCloneConfig(t *testing.T) { func Test_MultiBranchPipelineRegexFilter(t *testing.T) { - inputs := []*devops.MultiBranchPipeline{ + inputs := []*devopsv1alpha3.MultiBranchPipeline{ { Name: "", Description: "for test", ScriptPath: "Jenkinsfile", SourceType: "git", - GitSource: &devops.GitSource{ + GitSource: &devopsv1alpha3.GitSource{ Url: "https://github.com/kubesphere/devops", CredentialId: "git", DiscoverBranches: true, @@ -498,14 +498,14 @@ func Test_MultiBranchPipelineRegexFilter(t *testing.T) { Description: "for test", ScriptPath: "Jenkinsfile", SourceType: "github", - GitHubSource: &devops.GithubSource{ + GitHubSource: &devopsv1alpha3.GithubSource{ Owner: "kubesphere", Repo: "devops", 
CredentialId: "github", ApiUri: "https://api.github.com", DiscoverBranches: 1, DiscoverPRFromOrigin: 2, - DiscoverPRFromForks: &devops.DiscoverPRFromForks{ + DiscoverPRFromForks: &devopsv1alpha3.DiscoverPRFromForks{ Strategy: 1, Trust: 1, }, @@ -533,26 +533,26 @@ func Test_MultiBranchPipelineRegexFilter(t *testing.T) { func Test_MultiBranchPipelineMultibranchTrigger(t *testing.T) { - inputs := []*devops.MultiBranchPipeline{ + inputs := []*devopsv1alpha3.MultiBranchPipeline{ { Name: "", Description: "for test", ScriptPath: "Jenkinsfile", SourceType: "github", - GitHubSource: &devops.GithubSource{ + GitHubSource: &devopsv1alpha3.GithubSource{ Owner: "kubesphere", Repo: "devops", CredentialId: "github", ApiUri: "https://api.github.com", DiscoverBranches: 1, DiscoverPRFromOrigin: 2, - DiscoverPRFromForks: &devops.DiscoverPRFromForks{ + DiscoverPRFromForks: &devopsv1alpha3.DiscoverPRFromForks{ Strategy: 1, Trust: 1, }, RegexFilter: ".*", }, - MultiBranchJobTrigger: &devops.MultiBranchJobTrigger{ + MultiBranchJobTrigger: &devopsv1alpha3.MultiBranchJobTrigger{ CreateActionJobsToTrigger: "abc", DeleteActionJobsToTrigger: "ddd", }, @@ -562,20 +562,20 @@ func Test_MultiBranchPipelineMultibranchTrigger(t *testing.T) { Description: "for test", ScriptPath: "Jenkinsfile", SourceType: "github", - GitHubSource: &devops.GithubSource{ + GitHubSource: &devopsv1alpha3.GithubSource{ Owner: "kubesphere", Repo: "devops", CredentialId: "github", ApiUri: "https://api.github.com", DiscoverBranches: 1, DiscoverPRFromOrigin: 2, - DiscoverPRFromForks: &devops.DiscoverPRFromForks{ + DiscoverPRFromForks: &devopsv1alpha3.DiscoverPRFromForks{ Strategy: 1, Trust: 1, }, RegexFilter: ".*", }, - MultiBranchJobTrigger: &devops.MultiBranchJobTrigger{ + MultiBranchJobTrigger: &devopsv1alpha3.MultiBranchJobTrigger{ CreateActionJobsToTrigger: "abc", }, }, @@ -584,20 +584,20 @@ func Test_MultiBranchPipelineMultibranchTrigger(t *testing.T) { Description: "for test", ScriptPath: "Jenkinsfile", SourceType: 
"github", - GitHubSource: &devops.GithubSource{ + GitHubSource: &devopsv1alpha3.GithubSource{ Owner: "kubesphere", Repo: "devops", CredentialId: "github", ApiUri: "https://api.github.com", DiscoverBranches: 1, DiscoverPRFromOrigin: 2, - DiscoverPRFromForks: &devops.DiscoverPRFromForks{ + DiscoverPRFromForks: &devopsv1alpha3.DiscoverPRFromForks{ Strategy: 1, Trust: 1, }, RegexFilter: ".*", }, - MultiBranchJobTrigger: &devops.MultiBranchJobTrigger{ + MultiBranchJobTrigger: &devopsv1alpha3.MultiBranchJobTrigger{ DeleteActionJobsToTrigger: "ddd", }, }, diff --git a/pkg/simple/client/devops/jenkins/project_pipeline.go b/pkg/simple/client/devops/jenkins/project_pipeline.go index 4a28028c4..29a3807fe 100644 --- a/pkg/simple/client/devops/jenkins/project_pipeline.go +++ b/pkg/simple/client/devops/jenkins/project_pipeline.go @@ -4,20 +4,21 @@ import ( "fmt" "github.com/emicklei/go-restful" "k8s.io/klog" + devopsv1alpha3 "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3" "kubesphere.io/kubesphere/pkg/simple/client/devops" "net/http" ) -func (j *Jenkins) CreateProjectPipeline(projectId string, pipeline *devops.ProjectPipeline) (string, error) { - switch pipeline.Type { - case devops.NoScmPipelineType: +func (j *Jenkins) CreateProjectPipeline(projectId string, pipeline *devopsv1alpha3.Pipeline) (string, error) { + switch pipeline.Spec.Type { + case devopsv1alpha3.NoScmPipelineType: - config, err := createPipelineConfigXml(pipeline.Pipeline) + config, err := createPipelineConfigXml(pipeline.Spec.Pipeline) if err != nil { return "", restful.NewError(http.StatusInternalServerError, err.Error()) } - job, err := j.GetJob(pipeline.Pipeline.Name, projectId) + job, err := j.GetJob(pipeline.Name, projectId) if job != nil { err := fmt.Errorf("job name [%s] has been used", job.GetName()) return "", restful.NewError(http.StatusConflict, err.Error()) @@ -27,19 +28,19 @@ func (j *Jenkins) CreateProjectPipeline(projectId string, pipeline *devops.Proje return "", 
restful.NewError(devops.GetDevOpsStatusCode(err), err.Error()) } - _, err = j.CreateJobInFolder(config, pipeline.Pipeline.Name, projectId) + _, err = j.CreateJobInFolder(config, pipeline.Name, projectId) if err != nil { return "", restful.NewError(devops.GetDevOpsStatusCode(err), err.Error()) } - return pipeline.Pipeline.Name, nil - case devops.MultiBranchPipelineType: - config, err := createMultiBranchPipelineConfigXml(projectId, pipeline.MultiBranchPipeline) + return pipeline.Name, nil + case devopsv1alpha3.MultiBranchPipelineType: + config, err := createMultiBranchPipelineConfigXml(projectId, pipeline.Spec.MultiBranchPipeline) if err != nil { return "", restful.NewError(http.StatusInternalServerError, err.Error()) } - job, err := j.GetJob(pipeline.MultiBranchPipeline.Name, projectId) + job, err := j.GetJob(pipeline.Name, projectId) if job != nil { err := fmt.Errorf("job name [%s] has been used", job.GetName()) return "", restful.NewError(http.StatusConflict, err.Error()) @@ -49,12 +50,12 @@ func (j *Jenkins) CreateProjectPipeline(projectId string, pipeline *devops.Proje return "", restful.NewError(devops.GetDevOpsStatusCode(err), err.Error()) } - _, err = j.CreateJobInFolder(config, pipeline.MultiBranchPipeline.Name, projectId) + _, err = j.CreateJobInFolder(config, pipeline.Name, projectId) if err != nil { return "", restful.NewError(devops.GetDevOpsStatusCode(err), err.Error()) } - return pipeline.MultiBranchPipeline.Name, nil + return pipeline.Name, nil default: err := fmt.Errorf("error unsupport job type") @@ -71,16 +72,16 @@ func (j *Jenkins) DeleteProjectPipeline(projectId string, pipelineId string) (st return pipelineId, nil } -func (j *Jenkins) UpdateProjectPipeline(projectId string, pipeline *devops.ProjectPipeline) (string, error) { - switch pipeline.Type { - case devops.NoScmPipelineType: +func (j *Jenkins) UpdateProjectPipeline(projectId string, pipeline *devopsv1alpha3.Pipeline) (string, error) { + switch pipeline.Spec.Type { + case 
devopsv1alpha3.NoScmPipelineType: - config, err := createPipelineConfigXml(pipeline.Pipeline) + config, err := createPipelineConfigXml(pipeline.Spec.Pipeline) if err != nil { return "", restful.NewError(http.StatusInternalServerError, err.Error()) } - job, err := j.GetJob(pipeline.Pipeline.Name, projectId) + job, err := j.GetJob(pipeline.Name, projectId) if err != nil { return "", restful.NewError(devops.GetDevOpsStatusCode(err), err.Error()) @@ -91,17 +92,17 @@ func (j *Jenkins) UpdateProjectPipeline(projectId string, pipeline *devops.Proje return "", restful.NewError(devops.GetDevOpsStatusCode(err), err.Error()) } - return pipeline.Pipeline.Name, nil - case devops.MultiBranchPipelineType: + return pipeline.Name, nil + case devopsv1alpha3.MultiBranchPipelineType: - config, err := createMultiBranchPipelineConfigXml(projectId, pipeline.MultiBranchPipeline) + config, err := createMultiBranchPipelineConfigXml(projectId, pipeline.Spec.MultiBranchPipeline) if err != nil { klog.Errorf("%+v", err) return "", restful.NewError(http.StatusInternalServerError, err.Error()) } - job, err := j.GetJob(pipeline.MultiBranchPipeline.Name, projectId) + job, err := j.GetJob(pipeline.Spec.MultiBranchPipeline.Name, projectId) if err != nil { return "", restful.NewError(devops.GetDevOpsStatusCode(err), err.Error()) @@ -113,7 +114,7 @@ func (j *Jenkins) UpdateProjectPipeline(projectId string, pipeline *devops.Proje return "", restful.NewError(devops.GetDevOpsStatusCode(err), err.Error()) } - return pipeline.MultiBranchPipeline.Name, nil + return pipeline.Name, nil default: err := fmt.Errorf("error unsupport job type") @@ -122,7 +123,7 @@ func (j *Jenkins) UpdateProjectPipeline(projectId string, pipeline *devops.Proje } } -func (j *Jenkins) GetProjectPipelineConfig(projectId, pipelineId string) (*devops.ProjectPipeline, error) { +func (j *Jenkins) GetProjectPipelineConfig(projectId, pipelineId string) (*devopsv1alpha3.Pipeline, error) { job, err := j.GetJob(pipelineId, projectId) if err != 
nil { klog.Errorf("%+v", err) @@ -139,9 +140,11 @@ func (j *Jenkins) GetProjectPipelineConfig(projectId, pipelineId string) (*devop return nil, restful.NewError(devops.GetDevOpsStatusCode(err), err.Error()) } pipeline.Name = pipelineId - return &devops.ProjectPipeline{ - Type: devops.NoScmPipelineType, - Pipeline: pipeline, + return &devopsv1alpha3.Pipeline{ + Spec: devopsv1alpha3.PipelineSpec{ + Type: devopsv1alpha3.NoScmPipelineType, + Pipeline: pipeline, + }, }, nil case "org.jenkinsci.plugins.workflow.multibranch.WorkflowMultiBranchProject": @@ -154,9 +157,11 @@ func (j *Jenkins) GetProjectPipelineConfig(projectId, pipelineId string) (*devop return nil, restful.NewError(devops.GetDevOpsStatusCode(err), err.Error()) } pipeline.Name = pipelineId - return &devops.ProjectPipeline{ - Type: devops.MultiBranchPipelineType, - MultiBranchPipeline: pipeline, + return &devopsv1alpha3.Pipeline{ + Spec: devopsv1alpha3.PipelineSpec{ + Type: devopsv1alpha3.MultiBranchPipelineType, + MultiBranchPipeline: pipeline, + }, }, nil default: klog.Errorf("%+v", err) diff --git a/pkg/simple/client/devops/project_pipeline.go b/pkg/simple/client/devops/project_pipeline.go index 587b037ce..f5610fd5b 100644 --- a/pkg/simple/client/devops/project_pipeline.go +++ b/pkg/simple/client/devops/project_pipeline.go @@ -1,144 +1,10 @@ package devops -const ( - NoScmPipelineType = "pipeline" - MultiBranchPipelineType = "multi-branch-pipeline" -) - -type Parameters []*Parameter - -var ParameterTypeMap = map[string]string{ - "hudson.model.StringParameterDefinition": "string", - "hudson.model.ChoiceParameterDefinition": "choice", - "hudson.model.TextParameterDefinition": "text", - "hudson.model.BooleanParameterDefinition": "boolean", - "hudson.model.FileParameterDefinition": "file", - "hudson.model.PasswordParameterDefinition": "password", -} - -type ProjectPipeline struct { - Type string `json:"type" description:"type of devops pipeline, in scm or no scm"` - Pipeline *NoScmPipeline 
`json:"pipeline,omitempty" description:"no scm pipeline structs"` - MultiBranchPipeline *MultiBranchPipeline `json:"multi_branch_pipeline,omitempty" description:"in scm pipeline structs"` -} - -type NoScmPipeline struct { - Name string `json:"name" description:"name of pipeline"` - Description string `json:"descriptio,omitempty" description:"description of pipeline"` - Discarder *DiscarderProperty `json:"discarder,omitempty" description:"Discarder of pipeline, managing when to drop a pipeline"` - Parameters *Parameters `json:"parameters,omitempty" description:"Parameters define of pipeline,user could pass param when run pipeline"` - DisableConcurrent bool `json:"disable_concurrent,omitempty" mapstructure:"disable_concurrent" description:"Whether to prohibit the pipeline from running in parallel"` - TimerTrigger *TimerTrigger `json:"timer_trigger,omitempty" mapstructure:"timer_trigger" description:"Timer to trigger pipeline run"` - RemoteTrigger *RemoteTrigger `json:"remote_trigger,omitempty" mapstructure:"remote_trigger" description:"Remote api define to trigger pipeline run"` - Jenkinsfile string `json:"jenkinsfile,omitempty" description:"Jenkinsfile's content'"` -} - -type MultiBranchPipeline struct { - Name string `json:"name" description:"name of pipeline"` - Description string `json:"descriptio,omitempty" description:"description of pipeline"` - Discarder *DiscarderProperty `json:"discarder,omitempty" description:"Discarder of pipeline, managing when to drop a pipeline"` - TimerTrigger *TimerTrigger `json:"timer_trigger,omitempty" mapstructure:"timer_trigger" description:"Timer to trigger pipeline run"` - SourceType string `json:"source_type" description:"type of scm, such as github/git/svn"` - GitSource *GitSource `json:"git_source,omitempty" description:"git scm define"` - GitHubSource *GithubSource `json:"github_source,omitempty" description:"github scm define"` - SvnSource *SvnSource `json:"svn_source,omitempty" description:"multi branch svn scm define"` - 
SingleSvnSource *SingleSvnSource `json:"single_svn_source,omitempty" description:"single branch svn scm define"` - BitbucketServerSource *BitbucketServerSource `json:"bitbucket_server_source,omitempty" description:"bitbucket server scm defile"` - ScriptPath string `json:"script_path" mapstructure:"script_path" description:"script path in scm"` - MultiBranchJobTrigger *MultiBranchJobTrigger `json:"multibranch_job_trigger,omitempty" mapstructure:"multibranch_job_trigger" description:"Pipeline tasks that need to be triggered when branch creation/deletion"` -} - -type GitSource struct { - ScmId string `json:"scm_id,omitempty" description:"uid of scm"` - Url string `json:"url,omitempty" mapstructure:"url" description:"url of git source"` - CredentialId string `json:"credential_id,omitempty" mapstructure:"credential_id" description:"credential id to access git source"` - DiscoverBranches bool `json:"discover_branches,omitempty" mapstructure:"discover_branches" description:"Whether to discover a branch"` - CloneOption *GitCloneOption `json:"git_clone_option,omitempty" mapstructure:"git_clone_option" description:"advavced git clone options"` - RegexFilter string `json:"regex_filter,omitempty" mapstructure:"regex_filter" description:"Regex used to match the name of the branch that needs to be run"` -} - -type GithubSource struct { - ScmId string `json:"scm_id,omitempty" description:"uid of scm"` - Owner string `json:"owner,omitempty" mapstructure:"owner" description:"owner of github repo"` - Repo string `json:"repo,omitempty" mapstructure:"repo" description:"repo name of github repo"` - CredentialId string `json:"credential_id,omitempty" mapstructure:"credential_id" description:"credential id to access github source"` - ApiUri string `json:"api_uri,omitempty" mapstructure:"api_uri" description:"The api url can specify the location of the github apiserver.For private cloud configuration"` - DiscoverBranches int `json:"discover_branches,omitempty" 
mapstructure:"discover_branches" description:"Discover branch configuration"` - DiscoverPRFromOrigin int `json:"discover_pr_from_origin,omitempty" mapstructure:"discover_pr_from_origin" description:"Discover origin PR configuration"` - DiscoverPRFromForks *DiscoverPRFromForks `json:"discover_pr_from_forks,omitempty" mapstructure:"discover_pr_from_forks" description:"Discover fork PR configuration"` - CloneOption *GitCloneOption `json:"git_clone_option,omitempty" mapstructure:"git_clone_option" description:"advavced git clone options"` - RegexFilter string `json:"regex_filter,omitempty" mapstructure:"regex_filter" description:"Regex used to match the name of the branch that needs to be run"` -} - -type MultiBranchJobTrigger struct { - CreateActionJobsToTrigger string `json:"create_action_job_to_trigger,omitempty" description:"pipeline name to trigger"` - DeleteActionJobsToTrigger string `json:"delete_action_job_to_trigger,omitempty" description:"pipeline name to trigger"` -} - -type BitbucketServerSource struct { - ScmId string `json:"scm_id,omitempty" description:"uid of scm"` - Owner string `json:"owner,omitempty" mapstructure:"owner" description:"owner of github repo"` - Repo string `json:"repo,omitempty" mapstructure:"repo" description:"repo name of github repo"` - CredentialId string `json:"credential_id,omitempty" mapstructure:"credential_id" description:"credential id to access github source"` - ApiUri string `json:"api_uri,omitempty" mapstructure:"api_uri" description:"The api url can specify the location of the github apiserver.For private cloud configuration"` - DiscoverBranches int `json:"discover_branches,omitempty" mapstructure:"discover_branches" description:"Discover branch configuration"` - DiscoverPRFromOrigin int `json:"discover_pr_from_origin,omitempty" mapstructure:"discover_pr_from_origin" description:"Discover origin PR configuration"` - DiscoverPRFromForks *DiscoverPRFromForks `json:"discover_pr_from_forks,omitempty" 
mapstructure:"discover_pr_from_forks" description:"Discover fork PR configuration"` - CloneOption *GitCloneOption `json:"git_clone_option,omitempty" mapstructure:"git_clone_option" description:"advavced git clone options"` - RegexFilter string `json:"regex_filter,omitempty" mapstructure:"regex_filter" description:"Regex used to match the name of the branch that needs to be run"` -} - -type GitCloneOption struct { - Shallow bool `json:"shallow,omitempty" mapstructure:"shallow" description:"Whether to use git shallow clone"` - Timeout int `json:"timeout,omitempty" mapstructure:"timeout" description:"git clone timeout mins"` - Depth int `json:"depth,omitempty" mapstructure:"depth" description:"git clone depth"` -} - -type SvnSource struct { - ScmId string `json:"scm_id,omitempty" description:"uid of scm"` - Remote string `json:"remote,omitempty" description:"remote address url"` - CredentialId string `json:"credential_id,omitempty" mapstructure:"credential_id" description:"credential id to access svn source"` - Includes string `json:"includes,omitempty" description:"branches to run pipeline"` - Excludes string `json:"excludes,omitempty" description:"branches do not run pipeline"` -} -type SingleSvnSource struct { - ScmId string `json:"scm_id,omitempty" description:"uid of scm"` - Remote string `json:"remote,omitempty" description:"remote address url"` - CredentialId string `json:"credential_id,omitempty" mapstructure:"credential_id" description:"credential id to access svn source"` -} - -type DiscoverPRFromForks struct { - Strategy int `json:"strategy,omitempty" mapstructure:"strategy" description:"github discover strategy"` - Trust int `json:"trust,omitempty" mapstructure:"trust" description:"trust user type"` -} - -type DiscarderProperty struct { - DaysToKeep string `json:"days_to_keep,omitempty" mapstructure:"days_to_keep" description:"days to keep pipeline"` - NumToKeep string `json:"num_to_keep,omitempty" mapstructure:"num_to_keep" description:"nums to keep 
pipeline"` -} - -type Parameter struct { - Name string `json:"name" description:"name of param"` - DefaultValue string `json:"default_value,omitempty" mapstructure:"default_value" description:"default value of param"` - Type string `json:"type" description:"type of param"` - Description string `json:"description,omitempty" description:"description of pipeline"` -} - -type TimerTrigger struct { - // user in no scm job - Cron string `json:"cron,omitempty" description:"jenkins cron script"` - - // use in multi-branch job - Interval string `json:"interval,omitempty" description:"interval ms"` -} - -type RemoteTrigger struct { - Token string `json:"token,omitempty" description:"remote trigger token"` -} +import "kubesphere.io/kubesphere/pkg/apis/devops/v1alpha3" type ProjectPipelineOperator interface { - CreateProjectPipeline(projectId string, pipeline *ProjectPipeline) (string, error) + CreateProjectPipeline(projectId string, pipeline *v1alpha3.Pipeline) (string, error) DeleteProjectPipeline(projectId string, pipelineId string) (string, error) - UpdateProjectPipeline(projectId string, pipeline *ProjectPipeline) (string, error) - GetProjectPipelineConfig(projectId, pipelineId string) (*ProjectPipeline, error) + UpdateProjectPipeline(projectId string, pipeline *v1alpha3.Pipeline) (string, error) + GetProjectPipelineConfig(projectId, pipelineId string) (*v1alpha3.Pipeline, error) }