Windows NT KAMIDAKI 10.0 build 19045 (Windows 10) AMD64
Apache/2.4.58 (Win64) OpenSSL/3.1.3 PHP/8.3.9
Server IP : 192.168.3.16 & Your IP : 216.73.216.187
Domains : Can't read [ /etc/named.conf ]
User : SISTEMA
Menu : Terminal | Auto Root | Create File | Create Folder | Localroot Suggester | Backdoor Destroyer | Readme
Path : C:/Users/VEGETA/Envs/Pessoais/Scripts/
Name | Size | Permission | Date
__pycache__ | [ DIR ] | drwxrwxrwx | 2023-10-10 16:09
activate | 2.1 KB | -rw-rw-rw- | 2023-06-29 03:40
activate.bat | 1005 B | -rwxrwxrwx | 2023-06-29 03:40
activate.fish | 2.96 KB | -rw-rw-rw- | 2023-06-29 03:40
activate.nu | 2.52 KB | -rw-rw-rw- | 2023-06-29 03:40
activate.ps1 | 1.72 KB | -rw-rw-rw- | 2023-06-29 03:40
activate_this.py | 1.14 KB | -rw-rw-rw- | 2023-06-29 03:40
ansi2html.exe | 105.86 KB | -rwxrwxrwx | 2023-06-29 04:17
beeline | 1.06 KB | -rw-rw-rw- | 2023-10-10 16:09
beeline.cmd | 1.04 KB | -rwxrwxrwx | 2023-10-10 16:09
chardetect.exe | 105.87 KB | -rwxrwxrwx | 2023-11-14 14:19
dash-generate-components.exe | 105.88 KB | -rwxrwxrwx | 2023-06-29 04:18
dash-update-components.exe | 105.88 KB | -rwxrwxrwx | 2023-06-29 04:18
deactivate.bat | 511 B | -rwxrwxrwx | 2023-06-29 03:40
deactivate.nu | 682 B | -rw-rw-rw- | 2023-06-29 03:40
django-admin.exe | 105.91 KB | -rwxrwxrwx | 2024-02-25 20:38
docker-image-tool.sh | 10.73 KB | -rw-rw-rw- | 2023-10-10 16:09
f2py.exe | 105.86 KB | -rwxrwxrwx | 2023-06-29 03:41
find-spark-home | 1.89 KB | -rw-rw-rw- | 2023-10-10 16:09
find-spark-home.cmd | 2.62 KB | -rwxrwxrwx | 2023-10-10 16:09
find_spark_home.py | 4.1 KB | -rw-rw-rw- | 2023-10-10 16:09
flask.exe | 105.85 KB | -rwxrwxrwx | 2023-06-29 04:18
fonttools.exe | 105.86 KB | -rwxrwxrwx | 2023-06-29 03:41
game.exe | 105.85 KB | -rwxrwxrwx | 2024-03-11 21:24
google | 4.63 KB | -rw-rw-rw- | 2023-11-15 23:29
icalendar.exe | 105.86 KB | -rwxrwxrwx | 2024-02-25 20:37
load-spark-env.cmd | 2.28 KB | -rwxrwxrwx | 2023-10-10 16:09
load-spark-env.sh | 2.62 KB | -rw-rw-rw- | 2023-10-10 16:09
mss.exe | 105.86 KB | -rwxrwxrwx | 2023-09-21 16:50
normalizer.exe | 105.89 KB | -rwxrwxrwx | 2023-06-29 04:17
pip-3.11.exe | 105.87 KB | -rwxrwxrwx | 2023-06-29 03:40
pip.exe | 105.87 KB | -rwxrwxrwx | 2023-06-29 03:40
pip3.11.exe | 105.87 KB | -rwxrwxrwx | 2023-06-29 03:40
pip3.exe | 105.87 KB | -rwxrwxrwx | 2023-06-29 03:40
pydoc.bat | 24 B | -rwxrwxrwx | 2023-06-29 03:40
pyfiglet.exe | 105.85 KB | -rwxrwxrwx | 2024-02-26 00:55
pyftmerge.exe | 105.86 KB | -rwxrwxrwx | 2023-06-29 03:41
pyftsubset.exe | 105.86 KB | -rwxrwxrwx | 2023-06-29 03:41
pyspark | 2.57 KB | -rw-rw-rw- | 2023-10-10 16:09
pyspark.cmd | 1.14 KB | -rwxrwxrwx | 2023-10-10 16:09
pyspark2.cmd | 1.51 KB | -rwxrwxrwx | 2023-10-10 16:09
python.exe | 264.27 KB | -rwxrwxrwx | 2023-06-29 03:40
pythonw.exe | 253.27 KB | -rwxrwxrwx | 2023-06-29 03:40
renderer.exe | 105.88 KB | -rwxrwxrwx | 2023-06-29 04:18
run-example | 1.01 KB | -rw-rw-rw- | 2023-10-10 16:09
run-example.cmd | 1.19 KB | -rwxrwxrwx | 2023-10-10 16:09
scapy.exe | 105.86 KB | -rwxrwxrwx | 2023-11-20 00:59
spark-class | 3.48 KB | -rw-rw-rw- | 2023-10-10 16:09
spark-class.cmd | 1.15 KB | -rwxrwxrwx | 2023-10-10 16:09
spark-class2.cmd | 2.82 KB | -rwxrwxrwx | 2023-10-10 16:09
spark-connect-shell | 1.13 KB | -rw-rw-rw- | 2023-10-10 16:09
spark-shell | 3.05 KB | -rw-rw-rw- | 2023-10-10 16:09
spark-shell.cmd | 1.15 KB | -rwxrwxrwx | 2023-10-10 16:09
spark-shell2.cmd | 1.78 KB | -rwxrwxrwx | 2023-10-10 16:09
spark-sql | 1.04 KB | -rw-rw-rw- | 2023-10-10 16:09
spark-sql.cmd | 1.15 KB | -rwxrwxrwx | 2023-10-10 16:09
spark-sql2.cmd | 1.09 KB | -rwxrwxrwx | 2023-10-10 16:09
spark-submit | 1.02 KB | -rw-rw-rw- | 2023-10-10 16:09
spark-submit.cmd | 1.15 KB | -rwxrwxrwx | 2023-10-10 16:09
spark-submit2.cmd | 1.13 KB | -rwxrwxrwx | 2023-10-10 16:09
sparkR | 1.02 KB | -rw-rw-rw- | 2023-10-10 16:09
sparkR.cmd | 1.14 KB | -rwxrwxrwx | 2023-10-10 16:09
sparkR2.cmd | 1.07 KB | -rwxrwxrwx | 2023-10-10 16:09
sqlformat.exe | 105.86 KB | -rwxrwxrwx | 2024-02-25 20:37
tabulate.exe | 105.85 KB | -rwxrwxrwx | 2024-03-12 19:19
telnetlib3-client.exe | 105.86 KB | -rwxrwxrwx | 2023-09-21 15:21
telnetlib3-server.exe | 105.86 KB | -rwxrwxrwx | 2023-09-21 15:21
translate | 1.39 KB | -rw-rw-rw- | 2023-11-14 14:19
translate-cli.exe | 105.86 KB | -rwxrwxrwx | 2023-11-15 23:30
ttx.exe | 105.86 KB | -rwxrwxrwx | 2023-06-29 03:41
wheel-3.11.exe | 105.85 KB | -rwxrwxrwx | 2023-06-29 03:40
wheel.exe | 105.85 KB | -rwxrwxrwx | 2023-06-29 03:40
wheel3.11.exe | 105.85 KB | -rwxrwxrwx | 2023-06-29 03:40
wheel3.exe | 105.85 KB | -rwxrwxrwx | 2023-06-29 03:40
File : docker-image-tool.sh
#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# This script builds and pushes docker images when run from a release of Spark
# with Kubernetes support.

function error {
  echo "$@" 1>&2
  exit 1
}

if [ -z "${SPARK_HOME}" ]; then
  SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
fi
. "${SPARK_HOME}/bin/load-spark-env.sh"

CTX_DIR="$SPARK_HOME/target/tmp/docker"

function is_dev_build {
  [ ! -f "$SPARK_HOME/RELEASE" ]
}

function cleanup_ctx_dir {
  if is_dev_build; then
    rm -rf "$CTX_DIR"
  fi
}

trap cleanup_ctx_dir EXIT

function image_ref {
  local image="$1"
  local add_repo="${2:-1}"
  if [ $add_repo = 1 ] && [ -n "$REPO" ]; then
    image="$REPO/$image"
  fi
  if [ -n "$TAG" ]; then
    image="$image:$TAG"
  fi
  echo "$image"
}

function docker_push {
  local image_name="$1"
  if [ ! -z $(docker images -q "$(image_ref ${image_name})") ]; then
    docker push "$(image_ref ${image_name})"
    if [ $? -ne 0 ]; then
      error "Failed to push $image_name Docker image."
    fi
  else
    echo "$(image_ref ${image_name}) image not found. Skipping push for this image."
  fi
}

function resolve_file {
  local FILE=$1
  if [ -n "$FILE" ]; then
    local DIR=$(dirname $FILE)
    DIR=$(cd $DIR && pwd)
    FILE="${DIR}/$(basename $FILE)"
  fi
  echo $FILE
}

# Create a smaller build context for docker in dev builds to make the build faster. Docker
# uploads all of the current directory to the daemon, and it can get pretty big with dev
# builds that contain test log files and other artifacts.
#
# Three build contexts are created, one for each image: base, pyspark, and sparkr. For them
# to have the desired effect, the docker command needs to be executed inside the appropriate
# context directory.
#
# Note: docker does not support symlinks in the build context.
function create_dev_build_context {(
  set -e
  local BASE_CTX="$CTX_DIR/base"
  mkdir -p "$BASE_CTX/kubernetes"
  cp -r "resource-managers/kubernetes/docker/src/main/dockerfiles" \
    "$BASE_CTX/kubernetes/dockerfiles"

  cp -r "assembly/target/scala-$SPARK_SCALA_VERSION/jars" "$BASE_CTX/jars"
  cp -r "resource-managers/kubernetes/integration-tests/tests" \
    "$BASE_CTX/kubernetes/tests"

  mkdir "$BASE_CTX/examples"
  cp -r "examples/src" "$BASE_CTX/examples/src"
  # Copy just needed examples jars instead of everything.
  mkdir "$BASE_CTX/examples/jars"
  for i in examples/target/scala-$SPARK_SCALA_VERSION/jars/*; do
    if [ ! -f "$BASE_CTX/jars/$(basename $i)" ]; then
      cp $i "$BASE_CTX/examples/jars"
    fi
  done

  for other in bin sbin data; do
    cp -r "$other" "$BASE_CTX/$other"
  done

  local PYSPARK_CTX="$CTX_DIR/pyspark"
  mkdir -p "$PYSPARK_CTX/kubernetes"
  cp -r "resource-managers/kubernetes/docker/src/main/dockerfiles" \
    "$PYSPARK_CTX/kubernetes/dockerfiles"
  mkdir "$PYSPARK_CTX/python"
  cp -r "python/lib" "$PYSPARK_CTX/python/lib"
  cp -r "python/pyspark" "$PYSPARK_CTX/python/pyspark"

  local R_CTX="$CTX_DIR/sparkr"
  mkdir -p "$R_CTX/kubernetes"
  cp -r "resource-managers/kubernetes/docker/src/main/dockerfiles" \
    "$R_CTX/kubernetes/dockerfiles"
  cp -r "R" "$R_CTX/R"
)}

function img_ctx_dir {
  if is_dev_build; then
    echo "$CTX_DIR/$1"
  else
    echo "$SPARK_HOME"
  fi
}

function build {
  local BUILD_ARGS
  local SPARK_ROOT="$SPARK_HOME"

  if is_dev_build; then
    create_dev_build_context || error "Failed to create docker build context."
    SPARK_ROOT="$CTX_DIR/base"
  fi

  # Verify that the Docker image content directory is present
  if [ ! -d "$SPARK_ROOT/kubernetes/dockerfiles" ]; then
    error "Cannot find docker image. This script must be run from a runnable distribution of Apache Spark."
  fi

  # Verify that Spark has actually been built/is a runnable distribution
  # i.e. the Spark JARs that the Docker files will place into the image are present
  local TOTAL_JARS=$(ls $SPARK_ROOT/jars/spark-* | wc -l)
  TOTAL_JARS=$(( $TOTAL_JARS ))
  if [ "${TOTAL_JARS}" -eq 0 ]; then
    error "Cannot find Spark JARs. This script assumes that Apache Spark has first been built locally or this is a runnable distribution."
  fi

  local BUILD_ARGS=(${BUILD_PARAMS})

  # If a custom SPARK_UID was set add it to build arguments
  if [ -n "$SPARK_UID" ]; then
    BUILD_ARGS+=(--build-arg spark_uid=$SPARK_UID)
  fi

  local BINDING_BUILD_ARGS=(
    ${BUILD_ARGS[@]}
    --build-arg
    base_img=$(image_ref spark)
  )

  local BASEDOCKERFILE=${BASEDOCKERFILE:-"kubernetes/dockerfiles/spark/Dockerfile"}
  local PYDOCKERFILE=${PYDOCKERFILE:-false}
  local RDOCKERFILE=${RDOCKERFILE:-false}
  local ARCHS=${ARCHS:-"--platform linux/amd64,linux/arm64"}

  (cd $(img_ctx_dir base) && docker build $NOCACHEARG "${BUILD_ARGS[@]}" \
    -t $(image_ref spark) \
    -f "$BASEDOCKERFILE" .)
  if [ $? -ne 0 ]; then
    error "Failed to build Spark JVM Docker image, please refer to Docker build output for details."
  fi
  if [ "${CROSS_BUILD}" != "false" ]; then
    (cd $(img_ctx_dir base) && docker buildx build $ARCHS $NOCACHEARG "${BUILD_ARGS[@]}" --push --provenance=false \
      -t $(image_ref spark) \
      -f "$BASEDOCKERFILE" .)
  fi

  if [ "${PYDOCKERFILE}" != "false" ]; then
    (cd $(img_ctx_dir pyspark) && docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
      -t $(image_ref spark-py) \
      -f "$PYDOCKERFILE" .)
    if [ $? -ne 0 ]; then
      error "Failed to build PySpark Docker image, please refer to Docker build output for details."
    fi
    if [ "${CROSS_BUILD}" != "false" ]; then
      (cd $(img_ctx_dir pyspark) && docker buildx build $ARCHS $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" --push --provenance=false \
        -t $(image_ref spark-py) \
        -f "$PYDOCKERFILE" .)
    fi
  fi

  if [ "${RDOCKERFILE}" != "false" ]; then
    (cd $(img_ctx_dir sparkr) && docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
      -t $(image_ref spark-r) \
      -f "$RDOCKERFILE" .)
    if [ $? -ne 0 ]; then
      error "Failed to build SparkR Docker image, please refer to Docker build output for details."
    fi
    if [ "${CROSS_BUILD}" != "false" ]; then
      (cd $(img_ctx_dir sparkr) && docker buildx build $ARCHS $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" --push --provenance=false \
        -t $(image_ref spark-r) \
        -f "$RDOCKERFILE" .)
    fi
  fi
}

function push {
  docker_push "spark"
  docker_push "spark-py"
  docker_push "spark-r"
}

function usage {
  cat <<EOF
Usage: $0 [options] [command]
Builds or pushes the built-in Spark Docker image.

Commands:
  build       Build image. Requires a repository address to be provided if the image will be
              pushed to a different registry.
  push        Push a pre-built image to a registry. Requires a repository address to be provided.

Options:
  -f file               (Optional) Dockerfile to build for JVM based Jobs. By default builds the Dockerfile shipped with Spark.
  -p file               (Optional) Dockerfile to build for PySpark Jobs. Builds Python dependencies and ships with Spark.
                        Skips building PySpark docker image if not specified.
  -R file               (Optional) Dockerfile to build for SparkR Jobs. Builds R dependencies and ships with Spark.
                        Skips building SparkR docker image if not specified.
  -r repo               Repository address.
  -t tag                Tag to apply to the built image, or to identify the image to be pushed.
  -m                    Use minikube's Docker daemon.
  -n                    Build docker image with --no-cache
  -u uid                UID to use in the USER directive to set the user the main Spark process runs as inside the
                        resulting container
  -X                    Use docker buildx to cross build. Automatically pushes.
                        See https://docs.docker.com/buildx/working-with-buildx/ for steps to setup buildx.
  -b arg                Build arg to build or push the image. For multiple build args, this option needs to
                        be used separately for each build arg.

Using minikube when building images will do so directly into minikube's Docker daemon.
There is no need to push the images into minikube in that case, they'll be automatically
available when running applications inside the minikube cluster.

Check the following documentation for more information on using the minikube Docker daemon:

  https://kubernetes.io/docs/getting-started-guides/minikube/#reusing-the-docker-daemon

Examples:
  - Build image in minikube with tag "testing"
    $0 -m -t testing build

  - Build PySpark docker image
    $0 -r docker.io/myrepo -t v3.4.0 -p kubernetes/dockerfiles/spark/bindings/python/Dockerfile build

  - Build and push image with tag "v3.4.0" to docker.io/myrepo
    $0 -r docker.io/myrepo -t v3.4.0 build
    $0 -r docker.io/myrepo -t v3.4.0 push

  - Build and push Java11-based image with tag "v3.4.0" to docker.io/myrepo
    $0 -r docker.io/myrepo -t v3.4.0 -b java_image_tag=11-jre build
    $0 -r docker.io/myrepo -t v3.4.0 push

  - Build and push image for multiple archs to docker.io/myrepo
    $0 -r docker.io/myrepo -t v3.4.0 -X build

    # Note: buildx, which does cross building, needs to do the push during build
    # So there is no separate push step with -X

EOF
}

if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
  usage
  exit 0
fi

REPO=
TAG=
BASEDOCKERFILE=
PYDOCKERFILE=
RDOCKERFILE=
NOCACHEARG=
BUILD_PARAMS=
SPARK_UID=
CROSS_BUILD="false"
while getopts f:p:R:mr:t:Xnb:u: option
do
 case "${option}" in
 f) BASEDOCKERFILE=$(resolve_file ${OPTARG});;
 p) PYDOCKERFILE=$(resolve_file ${OPTARG});;
 R) RDOCKERFILE=$(resolve_file ${OPTARG});;
 r) REPO=${OPTARG};;
 t) TAG=${OPTARG};;
 n) NOCACHEARG="--no-cache";;
 b) BUILD_PARAMS=${BUILD_PARAMS}" --build-arg "${OPTARG};;
 X) CROSS_BUILD=1;;
 m)
   if ! which minikube 1>/dev/null; then
     error "Cannot find minikube."
   fi
   if ! minikube status 1>/dev/null; then
     error "Cannot contact minikube. Make sure it's running."
   fi
   eval $(minikube docker-env --shell bash)
   ;;
 u) SPARK_UID=${OPTARG};;
 esac
done

case "${@: -1}" in
  build)
    build
    ;;
  push)
    if [ -z "$REPO" ]; then
      usage
      exit 1
    fi
    push
    ;;
  *)
    usage
    exit 1
    ;;
esac
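For reference, a minimal invocation sketch assembled from the Examples block in the script's own usage text above. It assumes the script is run from the root of a runnable Spark distribution (where it sits at bin/docker-image-tool.sh); the repository docker.io/myrepo and tag v3.4.0 are illustrative placeholders taken from that usage text, not values present elsewhere in this listing:

  # Build the JVM base image and the PySpark image, tagged v3.4.0
  ./bin/docker-image-tool.sh -r docker.io/myrepo -t v3.4.0 \
    -p kubernetes/dockerfiles/spark/bindings/python/Dockerfile build

  # Push the built images (spark, spark-py) to docker.io/myrepo
  ./bin/docker-image-tool.sh -r docker.io/myrepo -t v3.4.0 push

With -X instead, buildx cross-builds and pushes during the build step, so the separate push command is not needed.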