Posts Tagged ‘ubuntu’

sudo apt update
sudo apt install git
cd
rm -rf spectre-meltdown-checker
git clone https://github.com/speed47/spectre-meltdown-checker.git
cd spectre-meltdown-checker
chmod +x spectre-meltdown-checker.sh
sudo ./spectre-meltdown-checker.sh

grep CONFIG_PAGE_TABLE_ISOLATION=y /boot/config-`uname -r` && echo "patched :)" || echo "unpatched :("
grep -E "cpu_insecure|cpu_meltdown" /proc/cpuinfo && echo "patched :)" || echo "unpatched :("
dmesg | grep "Kernel/User page tables isolation: enabled" && echo "patched :)" || echo "unpatched :("
uname -a
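# Extra check, not part of the original post: kernels that already ship the sysfs
# vulnerability interface (roughly 4.15 and later, or distro kernels with the fixes
# backported) expose the same information under /sys:
grep . /sys/devices/system/cpu/vulnerabilities/* 2>/dev/null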
# enable new Quad9 (9.9.9.9) DNS and DNSSEC service 
# in Ubuntu 17.10 64-bit using a bash shell script
sudo apt purge unbound
LogTime=$(date '+%Y-%m-%d_%Hh%Mm%Ss')
cp /etc/resolv.conf $HOME/resolv.conf_$LogTime
cp /etc/nsswitch.conf $HOME/nsswitch.conf_$LogTime
cp /etc/systemd/resolved.conf $HOME/resolved.conf_$LogTime

sudo service resolvconf stop
sudo update-rc.d resolvconf remove
cp /etc/resolv.conf /tmp/resolv.conf
grep -v nameserver /tmp/resolv.conf > /tmp/resolv.conf.1
echo 'nameserver 9.9.9.9' >> /tmp/resolv.conf.1
sudo cp /tmp/resolv.conf.1 /etc/resolv.conf
sudo service resolvconf start
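# optional check (not in the original script): confirm that Quad9 is now the only
# nameserver listed in /etc/resolv.conf
grep nameserver /etc/resolv.conf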

# enable systemd caching DNS resolver
rm -f /tmp/nsswitch.conf
rm -f /tmp/nsswitch.conf.1
cp /etc/nsswitch.conf /tmp/nsswitch.conf
grep -v hosts /tmp/nsswitch.conf > /tmp/nsswitch.conf.1
# 'dns' must appear in the hosts line below, or else wget cannot resolve hostnames
echo 'hosts: files mdns4_minimal [NOTFOUND=return] resolv dns myhostname mymachines' >> /tmp/nsswitch.conf.1
sudo cp /tmp/nsswitch.conf.1 /etc/nsswitch.conf

# set DNS server to 9.9.9.9
rm -f /tmp/resolved.conf
rm -f /tmp/resolved.conf.1
cp /etc/systemd/resolved.conf /tmp/resolved.conf
grep -v DNS /tmp/resolved.conf > /tmp/resolved.conf.1
# enable new Quad9 (9.9.9.9) DNS and DNSSEC service
# https://arstechnica.com/information-technology/2017/11/new-quad9-dns-service-blocks-malicious-domains-for-everyone/
echo 'DNS=9.9.9.9' >> /tmp/resolved.conf.1
echo 'DNSSEC=yes' >> /tmp/resolved.conf.1
sudo cp /tmp/resolved.conf.1 /etc/systemd/resolved.conf
sudo systemd-resolve --flush-caches
sudo systemctl restart systemd-resolved
sudo systemd-resolve --flush-caches
sudo systemd-resolve --status

# It is probably also necessary to manually set
# the DNS server to 9.9.9.9 in the router's configuration
# and in the NetworkManager GUI
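# As a sketch of how the NetworkManager side could be scripted from the command line
# (the connection name "Wired connection 1" is only an example; adjust it to your own
# connection and then uncomment these lines):
# nmcli connection modify "Wired connection 1" ipv4.dns 9.9.9.9 ipv4.ignore-auto-dns yes
# nmcli connection up "Wired connection 1"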

# test DNSSEC validation with the dig command-line tool, using DNS server 9.9.9.9:
# see: https://docs.menandmice.com/display/MM/How+to+test+DNSSEC+validation
dig pir.org +dnssec +multi
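As an extra check beyond the original post, querying Quad9 directly should show the "ad" (authenticated data) flag for a signed zone such as pir.org, while a deliberately broken zone (dnssec-failed.org is a well-known DNSSEC test domain) should come back as SERVFAIL when validation is working:

dig @9.9.9.9 pir.org +dnssec +multi | grep flags
dig @9.9.9.9 dnssec-failed.org +dnssec +multi | grep status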
cd /tmp
rm -f *.deb
wget --no-check-certificate -O index.html https://vivaldi.com/download/
wget --no-check-certificate `grep deb index.html | grep amd64 | cut -d'"' -f4`
sudo dpkg -i vivaldi*.deb
sudo apt-get install -f
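A quick way to confirm the package actually installed (not in the original post):

dpkg -l | grep vivaldi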
# .R script showing capabilities of sparklyr R package
# Prerequisites before running this R script: 
# Ubuntu 16.04.3 LTS 64-bit, r-base (version 3.4.1 or newer), 
# RStudio 64-bit version, libssl-dev, libcurl4-openssl-dev, libxml2-dev
install.packages("httr")
install.packages("xml2")
# New features in sparklyr 0.6:
# https://blog.rstudio.com/2017/07/31/sparklyr-0-6/
install.packages("sparklyr")
install.packages("dplyr")
install.packages("ggplot2")
install.packages("tidyr")
library(sparklyr)
library(dplyr)
library(ggplot2)
library(tidyr)
set.seed(100)
# sparklyr cheat sheet: https://github.com/rstudio/cheatsheets/raw/master/source/pdfs/sparklyr.pdf
# dplyr+tidyr: https://www.rstudio.com/wp-content/uploads/2015/02/data-wrangling-cheatsheet.pdf
# sparklyr currently (2017-08-19) only supports Apache Spark version 2.2.0 or older
# Install Spark locally:
sc_version <- "2.2.0"
spark_install(sc_version)
config <- spark_config()
# number of CPU cores to use:
config$spark.executor.cores <- 6
# amount of RAM to use for Apache Spark executors:
config$spark.executor.memory <- "4G"
# Connect to local version:
sc <- spark_connect(master = "local",
                    config = config, version = sc_version)
# Copy data to Spark memory:
import_iris <- sdf_copy_to(sc, iris, "spark_iris", overwrite = TRUE) 
# partition data:
partition_iris <- sdf_partition(import_iris, training = 0.5, testing = 0.5)
# Register each partition as a Spark table (Hive metadata):
sdf_register(partition_iris, c("spark_iris_training", "spark_iris_test"))
# Create reference to training data in Spark table
tidy_iris <- tbl(sc,"spark_iris_training") %>% select(Species, Petal_Length, Petal_Width) 
# Spark ML Decision Tree Model
model_iris <- tidy_iris %>% ml_decision_tree(response="Species", features=c("Petal_Length","Petal_Width")) 
# Create reference to test data in Spark table
test_iris <- tbl(sc,"spark_iris_test") 
# Bring predictions data back into R memory for plotting:
pred_iris <- sdf_predict(model_iris, test_iris) %>% collect
pred_iris %>%
  inner_join(data.frame(prediction = 0:2,
                        lab = model_iris$model.parameters$labels)) %>%
  ggplot(aes(Petal_Length, Petal_Width, col = lab)) +
  geom_point()
spark_disconnect(sc)
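For reference, the system packages listed in the header comments of the script above can be installed up front with apt; a minimal sketch using exactly the package names named in those comments:

sudo apt update
sudo apt install r-base libssl-dev libcurl4-openssl-dev libxml2-dev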
cd
sudo apt update
sudo apt install cmake build-essential
sudo apt install checkinstall git
sudo apt remove hashcat
sudo apt build-dep hashcat
sudo rm -rf hashcat/
git clone https://github.com/hashcat/hashcat.git
cd hashcat
git submodule update --init
sudo make
sudo checkinstall
hashcat --version
# hashcat version should be v3.5.0 or newer
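If the version check passes, a short benchmark is a reasonable way to confirm the build actually runs (optional, not part of the original post; -m 0 selects MD5):

hashcat -b -m 0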
# install all dependencies for gifine:
cd
sudo apt update
sudo apt install ffmpeg graphicsmagick gifsicle luarocks
sudo apt install cmake libgirepository1.0-dev build-essential git
sudo apt install libxext-dev checkinstall libimlib2-dev
sudo apt install libimlib2 mesa-common-dev libxrender-dev
sudo apt install libxrandr-dev libglew-dev libglm-dev libglu1-mesa-dev
sudo apt build-dep graphicsmagick
git clone https://github.com/naelstrof/slop.git
cd slop
cmake -DCMAKE_OPENGL_SUPPORT=true ./
sudo make
sudo make install
# install gifine:
sudo luarocks install --server=http://luarocks.org/dev gifine
# test with this command: gifine
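The post builds slop as one of gifine's dependencies; running it on its own should let you drag-select part of the screen and print the selection geometry, which confirms the build above worked (optional check):

slop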


#!/bin/bash
# linapple (Apple IIe emulator)
sudo apt-get update
sudo apt-get install git unp build-essential checkinstall libopenal-dev 
sudo apt-get install freeglut3-dev gawk libsdl1.2-dev libcurl4-openssl-dev zlib1g-dev libzip-dev
cd
sudo rm -rf apple2
sudo rm -rf linapple
git clone https://github.com/LasDesu/linapple.git
mkdir ~/linapple/disks
cd ~/linapple/src
sudo make
cd ~/linapple
# Get ProDOS User Guide
wget --no-check-certificate  http://www.applelogic.org/files/PRODOSUM.pdf
# Get Apple IIe ROM
wget --no-check-certificate  ftp://ftp.apple.asimov.net/pub/apple_II/emulators/rom_images/apple_iie_rom.zip
wget --no-check-certificate  ftp://ftp.apple.asimov.net/pub/apple_II/emulators/rom_images/077-0018%20Apple%20IIe%20Diagnostic%20Card%20-%20English%20-%20Lower%20ROM%202764.bin
wget --no-check-certificate  ftp://ftp.apple.asimov.net/pub/apple_II/emulators/rom_images/077-0019%20Apple%20IIe%20Diagnostic%20Card%20-%20English%20-%20Upper%20ROM%202764.bin
wget --no-check-certificate  ftp://ftp.apple.asimov.net/pub/apple_II/emulators/rom_images/2764_APPLE-IIe-0341-0162-A_PAL_SWE_FIN.bin
# get newest ProDOS 2.4.1 operating system for Apple IIe (release date: September 2016!)
wget --no-check-certificate https://archive.org/download/ProDOS_2_4_1/ProDOS_2_4_1.dsk
# get Apple II games
wget --no-check-certificate  http://www.virtualapple.org/apple2/Breakout_Chipout_Hi-Res_Breakout.zip
wget --no-check-certificate  http://www.virtualapple.org/apple2/Galaxian.zip
wget --no-check-certificate  http://www.virtualapple.org/apple2/SantaParaviaandFiumaccio.zip
wget --no-check-certificate  http://www.virtualapple.org/apple2/Karateka.zip
unp *.zip
mv *.dsk ~/linapple/disks/
~/linapple/linapple
# Press F3 in linapple and navigate to ~/linapple/disks and select the game to load
# Then press CTRL-SHIFT-F2 to restart the linapple emulator and load the game
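As an optional sanity check (not in the original post), list the disks folder to confirm the .dsk images were unpacked and moved before you try to load them with F3:

ls -l ~/linapple/disks/*.dsk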