# k8s.io/test-infra@v0.0.0-20240520184403-27c6b4c223d8/config/prow/cluster/ghproxy.yaml
# Copyright 2018 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
    15  kind: PersistentVolumeClaim
    16  apiVersion: v1
    17  metadata:
    18    namespace: default
    19    labels:
    20      app: ghproxy
    21    name: ghproxy
    22  spec:
    23    accessModes:
    24      - ReadWriteOnce
    25    resources:
    26      requests:
    27        storage: 100Gi
    28    # gce-ssd-retain is specified in config/prow/cluster/gce-ssd-retain_storageclass.yaml
    29    #
    30    # If you are setting up your own Prow instance you can do any of the following:
    31    # 1) Delete this to use the default storage class for your cluster.
    32    # 2) Specify your own storage class.
    33    # 3) If you are using GKE you can use the gce-ssd-retain storage class. It can be
    34    #    created with: `kubectl create -f config/prow/cluster/gce-ssd-retain_storageclass.yaml
    35    storageClassName: gce-ssd-retain
    36  ---
    37  apiVersion: apps/v1
    38  kind: Deployment
    39  metadata:
    40    namespace: default
    41    name: ghproxy
    42    labels:
    43      app: ghproxy
    44  spec:
    45    selector:
    46      matchLabels:
    47        app: ghproxy
    48    replicas: 1  # TODO(fejta): this should be HA
    49    template:
    50      metadata:
    51        labels:
    52          app: ghproxy
    53      spec:
    54        containers:
    55        - name: ghproxy
    56          image: gcr.io/k8s-prow/ghproxy:v20240517-ea10bd814
    57          args:
    58          - --cache-dir=/cache
    59          - --cache-sizeGB=99
    60          - --push-gateway=pushgateway
    61          - --serve-metrics=true
    62          ports:
    63          - name: main
    64            containerPort: 8888
    65          - name: metrics
    66            containerPort: 9090
    67          volumeMounts:
    68          - name: cache
    69            mountPath: /cache
    70        volumes:
    71        - name: cache
    72          persistentVolumeClaim:
    73            claimName: ghproxy
    74        # run on our dedicated node
    75        tolerations:
    76        - key: "dedicated"
    77          operator: "Equal"
    78          value: "ghproxy"
    79          effect: "NoSchedule"
    80        nodeSelector:
    81          dedicated: "ghproxy"
    82  ---
    83  apiVersion: v1
    84  kind: Service
    85  metadata:
    86    labels:
    87      app: ghproxy
    88    namespace: default
    89    name: ghproxy
    90  spec:
    91    ports:
    92    - name: main
    93      port: 80
    94      protocol: TCP
    95      targetPort: 8888
    96    - name: metrics
    97      port: 9090
    98    selector:
    99      app: ghproxy
   100    type: ClusterIP