Windows NT KAMIDAKI 10.0 build 19045 (Windows 10) AMD64
Apache/2.4.58 (Win64) OpenSSL/3.1.3 PHP/8.3.9
Server IP : 192.168.3.16 & Your IP : 216.73.216.187
Domains :
Can't Read [ /etc/named.conf ]
User : SISTEMA
Terminal
Auto Root
Create File
Create Folder
Localroot Suggester
Backdoor Destroyer
Readme
C: /
Users /
VEGETA /
Envs /
Pessoais /
Scripts /
Delete
Unzip
Name
Size
Permission
Date
Action
__pycache__
[ DIR ]
drwxrwxrwx
2023-10-10 16:09
activate
2.1
KB
-rw-rw-rw-
2023-06-29 03:40
activate.bat
1005
B
-rwxrwxrwx
2023-06-29 03:40
activate.fish
2.96
KB
-rw-rw-rw-
2023-06-29 03:40
activate.nu
2.52
KB
-rw-rw-rw-
2023-06-29 03:40
activate.ps1
1.72
KB
-rw-rw-rw-
2023-06-29 03:40
activate_this.py
1.14
KB
-rw-rw-rw-
2023-06-29 03:40
ansi2html.exe
105.86
KB
-rwxrwxrwx
2023-06-29 04:17
beeline
1.06
KB
-rw-rw-rw-
2023-10-10 16:09
beeline.cmd
1.04
KB
-rwxrwxrwx
2023-10-10 16:09
chardetect.exe
105.87
KB
-rwxrwxrwx
2023-11-14 14:19
dash-generate-components.exe
105.88
KB
-rwxrwxrwx
2023-06-29 04:18
dash-update-components.exe
105.88
KB
-rwxrwxrwx
2023-06-29 04:18
deactivate.bat
511
B
-rwxrwxrwx
2023-06-29 03:40
deactivate.nu
682
B
-rw-rw-rw-
2023-06-29 03:40
django-admin.exe
105.91
KB
-rwxrwxrwx
2024-02-25 20:38
docker-image-tool.sh
10.73
KB
-rw-rw-rw-
2023-10-10 16:09
f2py.exe
105.86
KB
-rwxrwxrwx
2023-06-29 03:41
find-spark-home
1.89
KB
-rw-rw-rw-
2023-10-10 16:09
find-spark-home.cmd
2.62
KB
-rwxrwxrwx
2023-10-10 16:09
find_spark_home.py
4.1
KB
-rw-rw-rw-
2023-10-10 16:09
flask.exe
105.85
KB
-rwxrwxrwx
2023-06-29 04:18
fonttools.exe
105.86
KB
-rwxrwxrwx
2023-06-29 03:41
game.exe
105.85
KB
-rwxrwxrwx
2024-03-11 21:24
google
4.63
KB
-rw-rw-rw-
2023-11-15 23:29
icalendar.exe
105.86
KB
-rwxrwxrwx
2024-02-25 20:37
load-spark-env.cmd
2.28
KB
-rwxrwxrwx
2023-10-10 16:09
load-spark-env.sh
2.62
KB
-rw-rw-rw-
2023-10-10 16:09
mss.exe
105.86
KB
-rwxrwxrwx
2023-09-21 16:50
normalizer.exe
105.89
KB
-rwxrwxrwx
2023-06-29 04:17
pip-3.11.exe
105.87
KB
-rwxrwxrwx
2023-06-29 03:40
pip.exe
105.87
KB
-rwxrwxrwx
2023-06-29 03:40
pip3.11.exe
105.87
KB
-rwxrwxrwx
2023-06-29 03:40
pip3.exe
105.87
KB
-rwxrwxrwx
2023-06-29 03:40
pydoc.bat
24
B
-rwxrwxrwx
2023-06-29 03:40
pyfiglet.exe
105.85
KB
-rwxrwxrwx
2024-02-26 00:55
pyftmerge.exe
105.86
KB
-rwxrwxrwx
2023-06-29 03:41
pyftsubset.exe
105.86
KB
-rwxrwxrwx
2023-06-29 03:41
pyspark
2.57
KB
-rw-rw-rw-
2023-10-10 16:09
pyspark.cmd
1.14
KB
-rwxrwxrwx
2023-10-10 16:09
pyspark2.cmd
1.51
KB
-rwxrwxrwx
2023-10-10 16:09
python.exe
264.27
KB
-rwxrwxrwx
2023-06-29 03:40
pythonw.exe
253.27
KB
-rwxrwxrwx
2023-06-29 03:40
renderer.exe
105.88
KB
-rwxrwxrwx
2023-06-29 04:18
run-example
1.01
KB
-rw-rw-rw-
2023-10-10 16:09
run-example.cmd
1.19
KB
-rwxrwxrwx
2023-10-10 16:09
scapy.exe
105.86
KB
-rwxrwxrwx
2023-11-20 00:59
spark-class
3.48
KB
-rw-rw-rw-
2023-10-10 16:09
spark-class.cmd
1.15
KB
-rwxrwxrwx
2023-10-10 16:09
spark-class2.cmd
2.82
KB
-rwxrwxrwx
2023-10-10 16:09
spark-connect-shell
1.13
KB
-rw-rw-rw-
2023-10-10 16:09
spark-shell
3.05
KB
-rw-rw-rw-
2023-10-10 16:09
spark-shell.cmd
1.15
KB
-rwxrwxrwx
2023-10-10 16:09
spark-shell2.cmd
1.78
KB
-rwxrwxrwx
2023-10-10 16:09
spark-sql
1.04
KB
-rw-rw-rw-
2023-10-10 16:09
spark-sql.cmd
1.15
KB
-rwxrwxrwx
2023-10-10 16:09
spark-sql2.cmd
1.09
KB
-rwxrwxrwx
2023-10-10 16:09
spark-submit
1.02
KB
-rw-rw-rw-
2023-10-10 16:09
spark-submit.cmd
1.15
KB
-rwxrwxrwx
2023-10-10 16:09
spark-submit2.cmd
1.13
KB
-rwxrwxrwx
2023-10-10 16:09
sparkR
1.02
KB
-rw-rw-rw-
2023-10-10 16:09
sparkR.cmd
1.14
KB
-rwxrwxrwx
2023-10-10 16:09
sparkR2.cmd
1.07
KB
-rwxrwxrwx
2023-10-10 16:09
sqlformat.exe
105.86
KB
-rwxrwxrwx
2024-02-25 20:37
tabulate.exe
105.85
KB
-rwxrwxrwx
2024-03-12 19:19
telnetlib3-client.exe
105.86
KB
-rwxrwxrwx
2023-09-21 15:21
telnetlib3-server.exe
105.86
KB
-rwxrwxrwx
2023-09-21 15:21
translate
1.39
KB
-rw-rw-rw-
2023-11-14 14:19
translate-cli.exe
105.86
KB
-rwxrwxrwx
2023-11-15 23:30
ttx.exe
105.86
KB
-rwxrwxrwx
2023-06-29 03:41
wheel-3.11.exe
105.85
KB
-rwxrwxrwx
2023-06-29 03:40
wheel.exe
105.85
KB
-rwxrwxrwx
2023-06-29 03:40
wheel3.11.exe
105.85
KB
-rwxrwxrwx
2023-06-29 03:40
wheel3.exe
105.85
KB
-rwxrwxrwx
2023-06-29 03:40
Save
Rename
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# spark-class: resolves SPARK_HOME, the JVM, and the Spark jars, then asks the
# Java-side launcher (org.apache.spark.launcher.Main) to build the final
# command line, which is parsed below and exec'd in place of this shell.

# Resolve SPARK_HOME from this script's location when not already exported.
if [ -z "${SPARK_HOME}" ]; then
  source "$(dirname "$0")"/find-spark-home
fi

# Pull in environment settings (e.g. SPARK_SCALA_VERSION) from spark-env.
. "${SPARK_HOME}"/bin/load-spark-env.sh

# Find the java binary: prefer JAVA_HOME, fall back to java on PATH.
if [ -n "${JAVA_HOME}" ]; then
  RUNNER="${JAVA_HOME}/bin/java"
else
  if [ "$(command -v java)" ]; then
    RUNNER="java"
  else
    echo "JAVA_HOME is not set" >&2
    exit 1
  fi
fi

# Find Spark jars: a binary distribution has jars/, a source build keeps them
# under assembly/target/.
if [ -d "${SPARK_HOME}/jars" ]; then
  SPARK_JARS_DIR="${SPARK_HOME}/jars"
else
  SPARK_JARS_DIR="${SPARK_HOME}/assembly/target/scala-$SPARK_SCALA_VERSION/jars"
fi

# Missing jars is fatal unless running under the Spark test harness
# (either SPARK_TESTING or SPARK_SQL_TESTING set).
if [ ! -d "$SPARK_JARS_DIR" ] && [ -z "$SPARK_TESTING$SPARK_SQL_TESTING" ]; then
  echo "Failed to find Spark jars directory ($SPARK_JARS_DIR)." 1>&2
  echo "You need to build Spark with the target \"package\" before running this program." 1>&2
  exit 1
else
  LAUNCH_CLASSPATH="$SPARK_JARS_DIR/*"
fi

# Add the launcher build dir to the classpath if requested.
if [ -n "$SPARK_PREPEND_CLASSES" ]; then
  LAUNCH_CLASSPATH="${SPARK_HOME}/launcher/target/scala-$SPARK_SCALA_VERSION/classes:$LAUNCH_CLASSPATH"
fi

# For tests: drop YARN/Hadoop config so tests run against a clean environment.
if [[ -n "$SPARK_TESTING" ]]; then
  unset YARN_CONF_DIR
  unset HADOOP_CONF_DIR
fi

# The launcher library will print arguments separated by a NULL character, to allow arguments with
# characters that would be otherwise interpreted by the shell. Read that in a while loop, populating
# an array that will be used to exec the final command.
#
# The exit code of the launcher is appended to the output, so the parent shell removes it from the
# command array and checks the value to see if the launcher succeeded.

# Runs the Java launcher with the script's arguments; emits the NUL-delimited
# command followed by the launcher's own exit status (also NUL-terminated).
build_command() {
  "$RUNNER" -Xmx128m $SPARK_LAUNCHER_OPTS -cp "$LAUNCH_CLASSPATH" org.apache.spark.launcher.Main "$@"
  printf "%d\0" $?
}

# Turn off posix mode since it does not allow process substitution
set +o posix
CMD=()
# Start with newline as the delimiter: everything before the first NUL is
# plain diagnostic text that the launcher wants echoed to the user.
DELIM=$'\n'
CMD_START_FLAG="false"
while IFS= read -d "$DELIM" -r _ARG; do
  # Strip carriage returns so Windows-built launchers don't leave \r in args.
  ARG=${_ARG//$'\r'}
  if [ "$CMD_START_FLAG" == "true" ]; then
    CMD+=("$ARG")
  else
    if [ "$ARG" == $'\0' ]; then
      # After NULL character is consumed, change the delimiter and consume command string.
      DELIM=''
      CMD_START_FLAG="true"
    elif [ "$ARG" != "" ]; then
      echo "$ARG"
    fi
  fi
done < <(build_command "$@")

COUNT=${#CMD[@]}
LAST=$((COUNT - 1))
# The last NUL-delimited token is the launcher's exit status, not an argument.
LAUNCHER_EXIT_CODE=${CMD[$LAST]}

# Certain JVM failures result in errors being printed to stdout (instead of stderr), which causes
# the code that parses the output of the launcher to get confused. In those cases, check if the
# exit code is an integer, and if it's not, handle it as a special error case.
if ! [[ $LAUNCHER_EXIT_CODE =~ ^[0-9]+$ ]]; then
  # Forward everything except the bogus trailing "exit code" to stderr.
  echo "${CMD[@]}" | head -n-1 1>&2
  exit 1
fi

# Propagate a genuine launcher failure as this script's exit status.
if [ $LAUNCHER_EXIT_CODE != 0 ]; then
  exit $LAUNCHER_EXIT_CODE
fi

# Drop the exit-status element and replace this shell with the built command.
CMD=("${CMD[@]:0:$LAST}")
exec "${CMD[@]}"