Windows NT KAMIDAKI 10.0 build 19045 (Windows 10) AMD64
Apache/2.4.58 (Win64) OpenSSL/3.1.3 PHP/8.3.9
Server IP : 192.168.3.16 & Your IP : 216.73.216.187
Domains :
Can't Read [ /etc/named.conf ]
User : SISTEMA
Terminal
Auto Root
Create File
Create Folder
Localroot Suggester
Backdoor Destroyer
Readme
C: /
Users /
VEGETA /
Envs /
Pessoais /
Scripts /
Delete
Unzip
Name
Size
Permission
Date
Action
__pycache__
[ DIR ]
drwxrwxrwx
2023-10-10 16:09
activate
2.1
KB
-rw-rw-rw-
2023-06-29 03:40
activate.bat
1005
B
-rwxrwxrwx
2023-06-29 03:40
activate.fish
2.96
KB
-rw-rw-rw-
2023-06-29 03:40
activate.nu
2.52
KB
-rw-rw-rw-
2023-06-29 03:40
activate.ps1
1.72
KB
-rw-rw-rw-
2023-06-29 03:40
activate_this.py
1.14
KB
-rw-rw-rw-
2023-06-29 03:40
ansi2html.exe
105.86
KB
-rwxrwxrwx
2023-06-29 04:17
beeline
1.06
KB
-rw-rw-rw-
2023-10-10 16:09
beeline.cmd
1.04
KB
-rwxrwxrwx
2023-10-10 16:09
chardetect.exe
105.87
KB
-rwxrwxrwx
2023-11-14 14:19
dash-generate-components.exe
105.88
KB
-rwxrwxrwx
2023-06-29 04:18
dash-update-components.exe
105.88
KB
-rwxrwxrwx
2023-06-29 04:18
deactivate.bat
511
B
-rwxrwxrwx
2023-06-29 03:40
deactivate.nu
682
B
-rw-rw-rw-
2023-06-29 03:40
django-admin.exe
105.91
KB
-rwxrwxrwx
2024-02-25 20:38
docker-image-tool.sh
10.73
KB
-rw-rw-rw-
2023-10-10 16:09
f2py.exe
105.86
KB
-rwxrwxrwx
2023-06-29 03:41
find-spark-home
1.89
KB
-rw-rw-rw-
2023-10-10 16:09
find-spark-home.cmd
2.62
KB
-rwxrwxrwx
2023-10-10 16:09
find_spark_home.py
4.1
KB
-rw-rw-rw-
2023-10-10 16:09
flask.exe
105.85
KB
-rwxrwxrwx
2023-06-29 04:18
fonttools.exe
105.86
KB
-rwxrwxrwx
2023-06-29 03:41
game.exe
105.85
KB
-rwxrwxrwx
2024-03-11 21:24
google
4.63
KB
-rw-rw-rw-
2023-11-15 23:29
icalendar.exe
105.86
KB
-rwxrwxrwx
2024-02-25 20:37
load-spark-env.cmd
2.28
KB
-rwxrwxrwx
2023-10-10 16:09
load-spark-env.sh
2.62
KB
-rw-rw-rw-
2023-10-10 16:09
mss.exe
105.86
KB
-rwxrwxrwx
2023-09-21 16:50
normalizer.exe
105.89
KB
-rwxrwxrwx
2023-06-29 04:17
pip-3.11.exe
105.87
KB
-rwxrwxrwx
2023-06-29 03:40
pip.exe
105.87
KB
-rwxrwxrwx
2023-06-29 03:40
pip3.11.exe
105.87
KB
-rwxrwxrwx
2023-06-29 03:40
pip3.exe
105.87
KB
-rwxrwxrwx
2023-06-29 03:40
pydoc.bat
24
B
-rwxrwxrwx
2023-06-29 03:40
pyfiglet.exe
105.85
KB
-rwxrwxrwx
2024-02-26 00:55
pyftmerge.exe
105.86
KB
-rwxrwxrwx
2023-06-29 03:41
pyftsubset.exe
105.86
KB
-rwxrwxrwx
2023-06-29 03:41
pyspark
2.57
KB
-rw-rw-rw-
2023-10-10 16:09
pyspark.cmd
1.14
KB
-rwxrwxrwx
2023-10-10 16:09
pyspark2.cmd
1.51
KB
-rwxrwxrwx
2023-10-10 16:09
python.exe
264.27
KB
-rwxrwxrwx
2023-06-29 03:40
pythonw.exe
253.27
KB
-rwxrwxrwx
2023-06-29 03:40
renderer.exe
105.88
KB
-rwxrwxrwx
2023-06-29 04:18
run-example
1.01
KB
-rw-rw-rw-
2023-10-10 16:09
run-example.cmd
1.19
KB
-rwxrwxrwx
2023-10-10 16:09
scapy.exe
105.86
KB
-rwxrwxrwx
2023-11-20 00:59
spark-class
3.48
KB
-rw-rw-rw-
2023-10-10 16:09
spark-class.cmd
1.15
KB
-rwxrwxrwx
2023-10-10 16:09
spark-class2.cmd
2.82
KB
-rwxrwxrwx
2023-10-10 16:09
spark-connect-shell
1.13
KB
-rw-rw-rw-
2023-10-10 16:09
spark-shell
3.05
KB
-rw-rw-rw-
2023-10-10 16:09
spark-shell.cmd
1.15
KB
-rwxrwxrwx
2023-10-10 16:09
spark-shell2.cmd
1.78
KB
-rwxrwxrwx
2023-10-10 16:09
spark-sql
1.04
KB
-rw-rw-rw-
2023-10-10 16:09
spark-sql.cmd
1.15
KB
-rwxrwxrwx
2023-10-10 16:09
spark-sql2.cmd
1.09
KB
-rwxrwxrwx
2023-10-10 16:09
spark-submit
1.02
KB
-rw-rw-rw-
2023-10-10 16:09
spark-submit.cmd
1.15
KB
-rwxrwxrwx
2023-10-10 16:09
spark-submit2.cmd
1.13
KB
-rwxrwxrwx
2023-10-10 16:09
sparkR
1.02
KB
-rw-rw-rw-
2023-10-10 16:09
sparkR.cmd
1.14
KB
-rwxrwxrwx
2023-10-10 16:09
sparkR2.cmd
1.07
KB
-rwxrwxrwx
2023-10-10 16:09
sqlformat.exe
105.86
KB
-rwxrwxrwx
2024-02-25 20:37
tabulate.exe
105.85
KB
-rwxrwxrwx
2024-03-12 19:19
telnetlib3-client.exe
105.86
KB
-rwxrwxrwx
2023-09-21 15:21
telnetlib3-server.exe
105.86
KB
-rwxrwxrwx
2023-09-21 15:21
translate
1.39
KB
-rw-rw-rw-
2023-11-14 14:19
translate-cli.exe
105.86
KB
-rwxrwxrwx
2023-11-15 23:30
ttx.exe
105.86
KB
-rwxrwxrwx
2023-06-29 03:41
wheel-3.11.exe
105.85
KB
-rwxrwxrwx
2023-06-29 03:40
wheel.exe
105.85
KB
-rwxrwxrwx
2023-06-29 03:40
wheel3.11.exe
105.85
KB
-rwxrwxrwx
2023-06-29 03:40
wheel3.exe
105.85
KB
-rwxrwxrwx
2023-06-29 03:40
Save
Rename
#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#
# Shell script for starting the Spark Shell REPL

# Detect Cygwin so we can apply the JLine terminal workaround below.
cygwin=false
case "$(uname)" in
  CYGWIN*) cygwin=true;;
esac

# Enter posix mode for bash
set -o posix

# Resolve SPARK_HOME if the caller has not already set it.
if [ -z "${SPARK_HOME}" ]; then
  source "$(dirname "$0")"/find-spark-home
fi

export _SPARK_CMD_USAGE="Usage: ./bin/spark-shell [options]

Scala REPL options:
  -I <file>                   preload <file>, enforcing line-by-line interpretation"

# SPARK-4161: scala does not assume use of the java classpath,
# so we need to add the "-Dscala.usejavacp=true" flag manually. We
# do this specifically for the Spark shell because the scala REPL
# has its own class loader, and any additional classpath specified
# through spark.driver.extraClassPath is not automatically propagated.
SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Dscala.usejavacp=true"

# Launch the REPL via spark-submit, forwarding all script arguments.
function main() {
  if $cygwin; then
    # Workaround for issue involving JLine and Cygwin
    # (see http://sourceforge.net/p/jline/bugs/40/).
    # If you're using the Mintty terminal emulator in Cygwin, may need to set the
    # "Backspace sends ^H" setting in "Keys" section of the Mintty options
    # (see https://github.com/sbt/sbt/issues/562).
    stty -icanon min 1 -echo > /dev/null 2>&1
    export SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Djline.terminal=unix"
    "${SPARK_HOME}"/bin/spark-submit --class org.apache.spark.repl.Main --name "Spark shell" "$@"
    stty icanon echo > /dev/null 2>&1
  else
    export SPARK_SUBMIT_OPTS
    "${SPARK_HOME}"/bin/spark-submit --class org.apache.spark.repl.Main --name "Spark shell" "$@"
  fi
}

# Copy restore-TTY-on-exit functions from Scala script so spark-shell exits properly even in
# binary distribution of Spark where Scala is not installed
exit_status=127
saved_stty=""

# restore stty settings (echo in particular)
function restoreSttySettings() {
  stty "$saved_stty"
  saved_stty=""
}

# Restore the terminal (if we changed it) and propagate the recorded exit code.
function onExit() {
  if [[ "$saved_stty" != "" ]]; then
    restoreSttySettings
  fi
  exit $exit_status
}

# to reenable echo if we are interrupted before completing.
trap onExit INT

# Save terminal settings; clear on error so we don't later try to restore them.
# NOTE: the upstream check `if [[ ! $? ]]` was a no-op — `$?` is always a
# non-empty string, so the negation could never be true. Test the command's
# exit status directly instead.
if ! saved_stty=$(stty -g 2>/dev/null); then
  saved_stty=""
fi

main "$@"

# record the exit status lest it be overwritten:
# then reenable echo and propagate the code.
exit_status=$?
onExit