diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 00000000..e69de29b diff --git a/404.html b/404.html new file mode 100644 index 00000000..8f371dcf --- /dev/null +++ b/404.html @@ -0,0 +1,148 @@ + + + + + + + + Introduction to Kebnekaise + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
    +
  • +
  • +
  • +
+
+
+
+
+ + +

404

+ +

Page not found

+ + +
+
+ +
+
+ +
+ +
+ +
+ + + + + +
+ + + + + + + + + + + diff --git a/assets/_markdown_exec_ansi.css b/assets/_markdown_exec_ansi.css new file mode 100644 index 00000000..68bcc480 --- /dev/null +++ b/assets/_markdown_exec_ansi.css @@ -0,0 +1,355 @@ +/* + Inspired by https://spec.draculatheme.com/ specification, they should work + decently with both dark and light themes. + */ +:root { + --ansi-red: #ff5555; + --ansi-green: #50fa7b; + --ansi-blue: #265285; + --ansi-yellow: #ffb86c; + --ansi-magenta: #bd93f9; + --ansi-cyan: #8be9fd; + --ansi-black: #282a36; + --ansi-white: #f8f8f2; +} + +.-Color-Green, +.-Color-Faint-Green, +.-Color-Bold-Green, +.-Color-BrightGreen { + color: var(--ansi-green); +} + +.-Color-Red, +.-Color-Faint-Red, +.-Color-Bold-Red, +.-Color-BrightRed { + color: var(--ansi-red); +} + +.-Color-Yellow, +.-Color-Faint-Yellow, +.-Color-Bold-Yellow, +.-Color-BrightYellow { + color: var(--ansi-yellow); +} + +.-Color-Blue, +.-Color-Faint-Blue, +.-Color-Bold-Blue, +.-Color-BrightBlue { + color: var(--ansi-blue); +} + +.-Color-Magenta, +.-Color-Faint-Magenta, +.-Color-Bold-Magenta, +.-Color-BrightMagenta { + color: var(--ansi-magenta); +} + +.-Color-Cyan, +.-Color-Faint-Cyan, +.-Color-Bold-Cyan, +.-Color-BrightCyan { + color: var(--ansi-cyan); +} + +.-Color-White, +.-Color-Faint-White, +.-Color-Bold-White, +.-Color-BrightWhite { + color: var(--ansi-white); +} + +.-Color-Black, +.-Color-Faint-Black, +.-Color-Bold-Black, +.-Color-BrightBlack { + color: var(--ansi-black); +} + +.-Color-Faint { + opacity: 0.5; +} + +.-Color-Bold { + font-weight: bold; +} + +.-Color-BGBlack, +.-Color-Black-BGBlack, +.-Color-Blue-BGBlack, +.-Color-Bold-BGBlack, +.-Color-BrightBGBlack, +.-Color-Bold-Black-BGBlack, +.-Color-BrightBlack-BGBlack, +.-Color-Bold-Green-BGBlack, +.-Color-BrightGreen-BGBlack, +.-Color-Bold-Cyan-BGBlack, +.-Color-BrightCyan-BGBlack, +.-Color-Bold-Blue-BGBlack, +.-Color-BrightBlue-BGBlack, +.-Color-Bold-Magenta-BGBlack, +.-Color-BrightMagenta-BGBlack, +.-Color-Bold-Red-BGBlack, +.-Color-BrightRed-BGBlack, +.-Color-Bold-White-BGBlack, +.-Color-BrightWhite-BGBlack, +.-Color-Bold-Yellow-BGBlack, +.-Color-BrightYellow-BGBlack, +.-Color-Cyan-BGBlack, +.-Color-Green-BGBlack, +.-Color-Magenta-BGBlack, +.-Color-Red-BGBlack, +.-Color-White-BGBlack, +.-Color-Yellow-BGBlack { + background-color: var(--ansi-black); +} + +.-Color-BGRed, +.-Color-Black-BGRed, +.-Color-Blue-BGRed, +.-Color-Bold-BGRed, +.-Color-BrightBGRed, +.-Color-Bold-Black-BGRed, +.-Color-BrightBlack-BGRed, +.-Color-Bold-Green-BGRed, +.-Color-BrightGreen-BGRed, +.-Color-Bold-Cyan-BGRed, +.-Color-BrightCyan-BGRed, +.-Color-Bold-Blue-BGRed, +.-Color-BrightBlue-BGRed, +.-Color-Bold-Magenta-BGRed, +.-Color-BrightMagenta-BGRed, +.-Color-Bold-Red-BGRed, +.-Color-BrightRed-BGRed, +.-Color-Bold-White-BGRed, +.-Color-BrightWhite-BGRed, +.-Color-Bold-Yellow-BGRed, +.-Color-BrightYellow-BGRed, +.-Color-Cyan-BGRed, +.-Color-Green-BGRed, +.-Color-Magenta-BGRed, +.-Color-Red-BGRed, +.-Color-White-BGRed, +.-Color-Yellow-BGRed { + background-color: var(--ansi-red); +} + +.-Color-BGGreen, +.-Color-Black-BGGreen, +.-Color-Blue-BGGreen, +.-Color-Bold-BGGreen, +.-Color-BrightBGGreen, +.-Color-Bold-Black-BGGreen, +.-Color-BrightBlack-BGGreen, +.-Color-Bold-Green-BGGreen, +.-Color-BrightGreen-BGGreen, +.-Color-Bold-Cyan-BGGreen, +.-Color-BrightCyan-BGGreen, +.-Color-Bold-Blue-BGGreen, +.-Color-BrightBlue-BGGreen, +.-Color-Bold-Magenta-BGGreen, +.-Color-BrightMagenta-BGGreen, +.-Color-Bold-Red-BGGreen, +.-Color-BrightRed-BGGreen, +.-Color-Bold-White-BGGreen, 
+.-Color-BrightWhite-BGGreen, +.-Color-Bold-Yellow-BGGreen, +.-Color-BrightYellow-BGGreen, +.-Color-Cyan-BGGreen, +.-Color-Green-BGGreen, +.-Color-Magenta-BGGreen, +.-Color-Red-BGGreen, +.-Color-White-BGGreen, +.-Color-Yellow-BGGreen { + background-color: var(--ansi-green); +} + +.-Color-BGYellow, +.-Color-Black-BGYellow, +.-Color-Blue-BGYellow, +.-Color-Bold-BGYellow, +.-Color-BrightBGYellow, +.-Color-Bold-Black-BGYellow, +.-Color-BrightBlack-BGYellow, +.-Color-Bold-Green-BGYellow, +.-Color-BrightGreen-BGYellow, +.-Color-Bold-Cyan-BGYellow, +.-Color-BrightCyan-BGYellow, +.-Color-Bold-Blue-BGYellow, +.-Color-BrightBlue-BGYellow, +.-Color-Bold-Magenta-BGYellow, +.-Color-BrightMagenta-BGYellow, +.-Color-Bold-Red-BGYellow, +.-Color-BrightRed-BGYellow, +.-Color-Bold-White-BGYellow, +.-Color-BrightWhite-BGYellow, +.-Color-Bold-Yellow-BGYellow, +.-Color-BrightYellow-BGYellow, +.-Color-Cyan-BGYellow, +.-Color-Green-BGYellow, +.-Color-Magenta-BGYellow, +.-Color-Red-BGYellow, +.-Color-White-BGYellow, +.-Color-Yellow-BGYellow { + background-color: var(--ansi-yellow); +} + +.-Color-BGBlue, +.-Color-Black-BGBlue, +.-Color-Blue-BGBlue, +.-Color-Bold-BGBlue, +.-Color-BrightBGBlue, +.-Color-Bold-Black-BGBlue, +.-Color-BrightBlack-BGBlue, +.-Color-Bold-Green-BGBlue, +.-Color-BrightGreen-BGBlue, +.-Color-Bold-Cyan-BGBlue, +.-Color-BrightCyan-BGBlue, +.-Color-Bold-Blue-BGBlue, +.-Color-BrightBlue-BGBlue, +.-Color-Bold-Magenta-BGBlue, +.-Color-BrightMagenta-BGBlue, +.-Color-Bold-Red-BGBlue, +.-Color-BrightRed-BGBlue, +.-Color-Bold-White-BGBlue, +.-Color-BrightWhite-BGBlue, +.-Color-Bold-Yellow-BGBlue, +.-Color-BrightYellow-BGBlue, +.-Color-Cyan-BGBlue, +.-Color-Green-BGBlue, +.-Color-Magenta-BGBlue, +.-Color-Red-BGBlue, +.-Color-White-BGBlue, +.-Color-Yellow-BGBlue { + background-color: var(--ansi-blue); +} + +.-Color-BGMagenta, +.-Color-Black-BGMagenta, +.-Color-Blue-BGMagenta, +.-Color-Bold-BGMagenta, +.-Color-BrightBGMagenta, +.-Color-Bold-Black-BGMagenta, +.-Color-BrightBlack-BGMagenta, +.-Color-Bold-Green-BGMagenta, +.-Color-BrightGreen-BGMagenta, +.-Color-Bold-Cyan-BGMagenta, +.-Color-BrightCyan-BGMagenta, +.-Color-Bold-Blue-BGMagenta, +.-Color-BrightBlue-BGMagenta, +.-Color-Bold-Magenta-BGMagenta, +.-Color-BrightMagenta-BGMagenta, +.-Color-Bold-Red-BGMagenta, +.-Color-BrightRed-BGMagenta, +.-Color-Bold-White-BGMagenta, +.-Color-BrightWhite-BGMagenta, +.-Color-Bold-Yellow-BGMagenta, +.-Color-BrightYellow-BGMagenta, +.-Color-Cyan-BGMagenta, +.-Color-Green-BGMagenta, +.-Color-Magenta-BGMagenta, +.-Color-Red-BGMagenta, +.-Color-White-BGMagenta, +.-Color-Yellow-BGMagenta { + background-color: var(--ansi-magenta); +} + +.-Color-BGCyan, +.-Color-Black-BGCyan, +.-Color-Blue-BGCyan, +.-Color-Bold-BGCyan, +.-Color-BrightBGCyan, +.-Color-Bold-Black-BGCyan, +.-Color-BrightBlack-BGCyan, +.-Color-Bold-Green-BGCyan, +.-Color-BrightGreen-BGCyan, +.-Color-Bold-Cyan-BGCyan, +.-Color-BrightCyan-BGCyan, +.-Color-Bold-Blue-BGCyan, +.-Color-BrightBlue-BGCyan, +.-Color-Bold-Magenta-BGCyan, +.-Color-BrightMagenta-BGCyan, +.-Color-Bold-Red-BGCyan, +.-Color-BrightRed-BGCyan, +.-Color-Bold-White-BGCyan, +.-Color-BrightWhite-BGCyan, +.-Color-Bold-Yellow-BGCyan, +.-Color-BrightYellow-BGCyan, +.-Color-Cyan-BGCyan, +.-Color-Green-BGCyan, +.-Color-Magenta-BGCyan, +.-Color-Red-BGCyan, +.-Color-White-BGCyan, +.-Color-Yellow-BGCyan { + background-color: var(--ansi-cyan); +} + +.-Color-BGWhite, +.-Color-Black-BGWhite, +.-Color-Blue-BGWhite, +.-Color-Bold-BGWhite, +.-Color-BrightBGWhite, +.-Color-Bold-Black-BGWhite, 
+.-Color-BrightBlack-BGWhite, +.-Color-Bold-Green-BGWhite, +.-Color-BrightGreen-BGWhite, +.-Color-Bold-Cyan-BGWhite, +.-Color-BrightCyan-BGWhite, +.-Color-Bold-Blue-BGWhite, +.-Color-BrightBlue-BGWhite, +.-Color-Bold-Magenta-BGWhite, +.-Color-BrightMagenta-BGWhite, +.-Color-Bold-Red-BGWhite, +.-Color-BrightRed-BGWhite, +.-Color-Bold-White-BGWhite, +.-Color-BrightWhite-BGWhite, +.-Color-Bold-Yellow-BGWhite, +.-Color-BrightYellow-BGWhite, +.-Color-Cyan-BGWhite, +.-Color-Green-BGWhite, +.-Color-Magenta-BGWhite, +.-Color-Red-BGWhite, +.-Color-White-BGWhite, +.-Color-Yellow-BGWhite { + background-color: var(--ansi-white); +} + +.-Color-Black, +.-Color-Bold-Black, +.-Color-BrightBlack, +.-Color-Black-BGBlack, +.-Color-Bold-Black-BGBlack, +.-Color-BrightBlack-BGBlack, +.-Color-Black-BGGreen, +.-Color-Red-BGRed, +.-Color-Bold-Red-BGRed, +.-Color-BrightRed-BGRed, +.-Color-Bold-Blue-BGBlue, +.-Color-BrightBlue-BGBlue, +.-Color-Blue-BGBlue { + text-shadow: 0 0 1px var(--ansi-white); +} + +.-Color-Bold-Cyan-BGCyan, +.-Color-BrightCyan-BGCyan, +.-Color-Bold-Magenta-BGMagenta, +.-Color-BrightMagenta-BGMagenta, +.-Color-Bold-White, +.-Color-BrightWhite, +.-Color-Bold-Yellow-BGYellow, +.-Color-BrightYellow-BGYellow, +.-Color-Bold-Green-BGGreen, +.-Color-BrightGreen-BGGreen, +.-Color-Cyan-BGCyan, +.-Color-Cyan-BGGreen, +.-Color-Green-BGCyan, +.-Color-Green-BGGreen, +.-Color-Magenta-BGMagenta, +.-Color-White, +.-Color-White-BGWhite, +.-Color-Yellow-BGYellow { + text-shadow: 0 0 1px var(--ansi-black); +} \ No newline at end of file diff --git a/assets/_markdown_exec_pyodide.css b/assets/_markdown_exec_pyodide.css new file mode 100644 index 00000000..71f9f285 --- /dev/null +++ b/assets/_markdown_exec_pyodide.css @@ -0,0 +1,50 @@ +html[data-theme="light"] { + @import "https://cdn.jsdelivr.net/npm/highlightjs-themes@1.0.0/tomorrow.css" +} + +html[data-theme="dark"] { + @import "https://cdn.jsdelivr.net/npm/highlightjs-themes@1.0.0/tomorrow-night-blue.min.css" +} + + +.ace_gutter { + z-index: 1; +} + +.pyodide-editor { + width: 100%; + min-height: 200px; + max-height: 400px; + font-size: .85em; +} + +.pyodide-editor-bar { + color: var(--md-primary-bg-color); + background-color: var(--md-primary-fg-color); + width: 100%; + font: monospace; + font-size: 0.75em; + padding: 2px 0 2px; +} + +.pyodide-bar-item { + padding: 0 18px 0; + display: inline-block; + width: 50%; +} + +.pyodide pre { + margin: 0; +} + +.pyodide-output { + width: 100%; + margin-bottom: -15px; + min-height: 46px; + max-height: 400px +} + +.pyodide-clickable { + cursor: pointer; + text-align: right; +} \ No newline at end of file diff --git a/assets/_markdown_exec_pyodide.js b/assets/_markdown_exec_pyodide.js new file mode 100644 index 00000000..1f6ae91b --- /dev/null +++ b/assets/_markdown_exec_pyodide.js @@ -0,0 +1,109 @@ +var _sessions = {}; + +function getSession(name, pyodide) { + if (!(name in _sessions)) { + _sessions[name] = pyodide.globals.get("dict")(); + } + return _sessions[name]; +} + +function writeOutput(element, string) { + element.innerHTML += string + '\n'; +} + +function clearOutput(element) { + element.innerHTML = ''; +} + +async function evaluatePython(pyodide, editor, output, session) { + pyodide.setStdout({ batched: (string) => { writeOutput(output, string); } }); + let result, code = editor.getValue(); + clearOutput(output); + try { + result = await pyodide.runPythonAsync(code, { globals: getSession(session, pyodide) }); + } catch (error) { + writeOutput(output, error); + } + if (result) writeOutput(output, result); + 
hljs.highlightElement(output); +} + +async function initPyodide() { + try { + let pyodide = await loadPyodide(); + await pyodide.loadPackage("micropip"); + return pyodide; + } catch(error) { + return null; + } +} + +function getTheme() { + return document.body.getAttribute('data-md-color-scheme'); +} + +function setTheme(editor, currentTheme, light, dark) { + // https://gist.github.com/RyanNutt/cb8d60997d97905f0b2aea6c3b5c8ee0 + if (currentTheme === "default") { + editor.setTheme("ace/theme/" + light); + document.querySelector(`link[title="light"]`).removeAttribute("disabled"); + document.querySelector(`link[title="dark"]`).setAttribute("disabled", "disabled"); + } else if (currentTheme === "slate") { + editor.setTheme("ace/theme/" + dark); + document.querySelector(`link[title="dark"]`).removeAttribute("disabled"); + document.querySelector(`link[title="light"]`).setAttribute("disabled", "disabled"); + } +} + +function updateTheme(editor, light, dark) { + // Create a new MutationObserver instance + const observer = new MutationObserver((mutations) => { + // Loop through the mutations that occurred + mutations.forEach((mutation) => { + // Check if the mutation was a change to the data-md-color-scheme attribute + if (mutation.attributeName === 'data-md-color-scheme') { + // Get the new value of the attribute + const newColorScheme = mutation.target.getAttribute('data-md-color-scheme'); + // Update the editor theme + setTheme(editor, newColorScheme, light, dark); + } + }); + }); + + // Configure the observer to watch for changes to the data-md-color-scheme attribute + observer.observe(document.body, { + attributes: true, + attributeFilter: ['data-md-color-scheme'], + }); +} + +async function setupPyodide(idPrefix, install = null, themeLight = 'tomorrow', themeDark = 'tomorrow_night', session = null) { + const editor = ace.edit(idPrefix + "editor"); + const run = document.getElementById(idPrefix + "run"); + const clear = document.getElementById(idPrefix + "clear"); + const output = document.getElementById(idPrefix + "output"); + + updateTheme(editor, themeLight, themeDark); + + editor.session.setMode("ace/mode/python"); + setTheme(editor, getTheme(), themeLight, themeDark); + + writeOutput(output, "Initializing..."); + let pyodide = await pyodidePromise; + if (install && install.length) { + micropip = pyodide.pyimport("micropip"); + for (const package of install) + await micropip.install(package); + } + clearOutput(output); + run.onclick = () => evaluatePython(pyodide, editor, output, session); + clear.onclick = () => clearOutput(output); + output.parentElement.parentElement.addEventListener("keydown", (event) => { + if (event.ctrlKey && event.key.toLowerCase() === 'enter') { + event.preventDefault(); + run.click(); + } + }); +} + +var pyodidePromise = initPyodide(); diff --git a/assets/_mkdocstrings.css b/assets/_mkdocstrings.css new file mode 100644 index 00000000..e69de29b diff --git a/batch/index.html b/batch/index.html new file mode 100644 index 00000000..3cf728de --- /dev/null +++ b/batch/index.html @@ -0,0 +1,583 @@ + + + + + + + + The Batch System - Introduction to Kebnekaise + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +

The Batch System

+

+

\frame{\frametitle{The Batch System (SLURM)}

+

\begin{block}{} + \begin{itemize} + \item Large/long/parallel jobs \textbf{must} be run through the batch system + \item SLURM is an Open Source job scheduler, which provides three key functions + \begin{itemize} + \item Keeps track of available system resources + \item Enforces local system resource usage and job scheduling policies + \item Manages a job queue, distributing work across resources according to policies + \end{itemize} + \item In order to run a batch job, you need to create and submit a +SLURM submit file (also called a batch submit file, a batch +script, or a job script). +\item Guides and documentation at: http://www.hpc2n.umu.se/support + \end{itemize} + \end{block} +}

+

\frame{\frametitle{The Batch System}\framesubtitle{Accounting, Compute nodes, Kebnekaise}

+

\begin{block}{} + \begin{small} + Here the Skylake nodes are used as an example. The only difference for the Broadwell nodes is that it would say 128G instead of 192G per node. + \end{small} + \end{block}

+

\begin{block}{} +\begin{center} +\includegraphics[width=9cm]{figures/Allocation-Kebnekaise-thin_skylake.png} +\end{center} + \end{block} +}

+

\frame{\frametitle{The Batch System}\framesubtitle{Accounting, largemem nodes, Kebnekaise}

+

\begin{block}{} +\begin{center} +\includegraphics[width=10cm]{figures/Allocation-Kebnekaise-largemem_v3.png} +\end{center} + \end{block} +}

+

\frame{\frametitle{The Batch System}\framesubtitle{Accounting, K80 GPU nodes, Kebnekaise.}

+
\begin{block}{}
+\begin{footnotesize}
+  The K80 GPU cards have 2 onboard compute engines (GK210 chips). Most GPU nodes have 2 K80s, placed together as 14 cores + 1 K80/socket. 4 GPU nodes have 4 K80 GPU cards. 
+\end{footnotesize}
+
+

\end{block}

+

\begin{block}{} +\begin{center} +\includegraphics[width=5.8cm]{figures/K80-GPUs.png} +\end{center} + \end{block} +}

+

\frame{\frametitle{The Batch System}\framesubtitle{Accounting, V100 GPU nodes, Kebnekaise.}

+
\begin{block}{}
+\begin{scriptsize}
+  Each V100 GPU accelerator card has 1 onboard compute engine (GV100 chip). They are placed together as 14 cores + 1 V100 on a socket (28 cores, 2 V100s per node).  
+\end{scriptsize}
+
+

\end{block}

+

\begin{block}{} +\begin{center} +\includegraphics[width=6.8cm]{figures/V100-allocation-new.png} +\end{center} + \end{block} +}

+

\frame{\frametitle{The Batch System}\framesubtitle{Accounting, A100 GPU nodes, Kebnekaise.}

+
\begin{block}{}
+\begin{scriptsize}
+  Each A100 GPU accelerator card has 1 onboard compute engine. The AMD Zen3 nodes have 2 CPU sockets with 24 cores each, for a total of 48 cores, and 2 NVIDIA A100 GPUs. They are placed together as 24 cores + 1 A100 on a socket. 
+\end{scriptsize}
+
+

\end{block}

+

\begin{block}{} +\begin{center} +\includegraphics[width=6.8cm]{figures/A100-allocation.png} +\end{center} + \end{block} +}

+

\frame{\frametitle{The Batch System (SLURM)}\framesubtitle{Useful Commands}

+

\begin{block}{} + \begin{itemize} + \begin{footnotesize} + \item Submit job: \texttt{sbatch \(<\)jobscript\(>\)} + \item Get list of your jobs: \texttt{squeue -u \(<\)username\(>\)} + \item \texttt{srun \(<\)commands for your job/program\(>\)} + \item Check on a specific job: \texttt{scontrol show job \(<\)job id\(>\)} + \item Delete a specific job: \texttt{scancel \(<\)job id\(>\)} + \item Delete all your own jobs: \texttt{scancel -u \(<\)user\(>\)} + \item More detailed info about jobs: \ + \end{footnotesize} + \begin{scriptsize}
+ \texttt{sacct -l -j \(<\)jobid\(>\) -o jobname,NTasks,nodelist,MaxRSS,MaxVMSize…} + \end{scriptsize} + \begin{itemize} + \begin{footnotesize} + \item More flags can be found with \texttt{man sacct} + \item The output will be \textbf{very} wide. To view, use \ + \texttt{sacct -l -j ....... | less -S} \ + (makes it sideways scrollable, using the left/right arrow key) + \end{footnotesize} + \end{itemize} + \begin{footnotesize} + \item Web url with graphical info about a job: \texttt{job-usage \(<\)job-id\(>\)} + \end{footnotesize} + \end{itemize} + Use \texttt{man sbatch, man srun, man ....} for more information + \end{block} +}
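As a rough illustration of how these commands fit together (the script name and job ID below are only placeholders), a typical round trip looks like:

b-an01 [~]$ sbatch myjobscript.sh
Submitted batch job 15952
b-an01 [~]$ squeue -u $USER                # is it queued or running?
b-an01 [~]$ scontrol show job 15952        # details while it is queued/running
b-an01 [~]$ sacct -l -j 15952 | less -S    # accounting info after it has finished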

+

\frame{\frametitle{The Batch System (SLURM)}\framesubtitle{Job Output}

+

\begin{block}{} + \begin{itemize} + \item Output and errors in: \ +\texttt{slurm-\(<\)job id\(>\).out} + \item Look at it with vi, nano, emacs, cat, less… + \item To get output and error files split up, you can give these flags in the submit script: \ +\texttt{#SBATCH --error=job.\%J.err} \ +\texttt{#SBATCH --output=job.\%J.out} + \end{itemize} + \end{block} +}

+

\frame{\frametitle{The Batch System (SLURM)}\framesubtitle{Using different parts of Kebnekaise}

+

\begin{block}{} + \begin{scriptsize} + \begin{itemize} + \item Use the ‘fat’ nodes by adding this flag to your script: \ + \texttt{#SBATCH -p largemem} (separate resource) \ + \item Specifying Intel Broadwell, Intel Skylake, or AMD Zen3 CPUs: \ + \texttt{#SBATCH --constraint=broadwell} \ + or \ + \texttt{#SBATCH --constraint=skylake} \ + or \ + \texttt{#SBATCH --constraint=zen3} \ + \item Using the GPU nodes (separate resource): \ + \texttt{#SBATCH --gres=gpu:\(<\)type-of-card\(>\):x} where \(<\)type-of-card\(>\) is either k80, v100, or a100 and x = 1, 2, or 4 (4 only for K80). \ + \begin{itemize} + \begin{scriptsize} + \item In the case of the A100 GPU nodes, you also need to add a partition \ + \texttt{#SBATCH -p amd_gpu} + \end{scriptsize} + \end{itemize} + \item Use the AMD login node for correct modules and compilers for AMD Zen3 and A100 nodes: \ \texttt{kebnekaise-amd-tl.hpc2n.umu.se} or \\texttt{kebnekaise-amd.hpc2n.umu.se} + \end{itemize} + More on https://www.hpc2n.umu.se/documentation/guides/using_kebnekaise + \end{scriptsize} + \end{block} + }
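As a sketch of how these flags combine in practice (the project ID is the course project; the core count and walltime are arbitrary choices for the example), a job script header requesting Skylake nodes could start like this:

#!/bin/bash
#SBATCH -A hpc2n2023-132
#SBATCH -n 28
#SBATCH --time=00:30:00
# Constrain the job to the Intel Skylake nodes
#SBATCH --constraint=skylake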

+

\frame{\frametitle{The Batch System (SLURM)}\framesubtitle{Simple example, serial}

+

\begin{block}{} + \justify +\begin{footnotesize} +Example: Serial job on Kebnekaise, compiler toolchain ‘foss’ +\end{footnotesize} + \end{block}

+

\begin{block}{} +\begin{footnotesize} +\texttt{#!/bin/bash} \ +\texttt{# Project id - change to your own after the course!} \ +\texttt{#SBATCH -A hpc2n2023-132} \ +\texttt{# Asking for 1 core} \ +\texttt{#SBATCH -n 1} \ +\texttt{# Asking for a walltime of 5 min} \ +\texttt{#SBATCH --time=00:05:00} \ +\vspace{3mm} +\texttt{# Purge modules before loading new ones in a script. } \ +\texttt{ml purge > /dev/null 2>\&1} \ +\texttt{ml foss/2021b} \ +\vspace{3mm} +\texttt{./my_serial_program} +\end{footnotesize} + \end{block}

+

\begin{block}{} + \justify +\begin{footnotesize} +Submit with: \ +\texttt{sbatch \(<\)jobscript\(>\)} +\end{footnotesize} + \end{block}

+

}

+

\frame{\frametitle{The Batch System (SLURM)}\framesubtitle{Example, MPI C program}

+

\begin{block}{} + \begin{footnotesize} + \texttt{#include \(<\)stdio.h\(>\)} \ +\texttt{#include \(<\)mpi.h\(>\)} \ +\vspace{3mm} +\texttt{int main (int argc, char *argv[]) {} \ +\vspace{3mm} +\texttt{int myrank, size;} \ +\vspace{3mm} +\texttt{MPI_Init(\&argc, \&argv);} \ +\texttt{MPI_Comm_rank(MPI_COMM_WORLD, \&myrank);} \ +\texttt{MPI_Comm_size(MPI_COMM_WORLD, \&size);} \ +\vspace{3mm} +\texttt{printf("Processor \%d of \%d: Hello World!\textbackslash n", myrank, size);} \ +\vspace{3mm} +\texttt{MPI_Finalize();} +\vspace{3mm} +\texttt{}} + \end{footnotesize} + \end{block}

+

}
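Before submitting, the program needs to be compiled with the MPI wrapper from a loaded toolchain. A minimal sketch, assuming the code above is saved as hello_mpi.c (the file name is only an example):

b-an01 [~]$ ml purge > /dev/null 2>&1
b-an01 [~]$ ml foss/2021b
b-an01 [~]$ mpicc hello_mpi.c -o my_parallel_program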

+

\frame{\frametitle{The Batch System (SLURM)}\framesubtitle{Simple example, parallel}

+

\begin{block}{} + \justify + \begin{footnotesize} + Example: MPI job on Kebnekaise, compiler toolchain ‘foss’ + \end{footnotesize} +\end{block}

+

\begin{block}{} +\begin{footnotesize} +\texttt{#!/bin/bash} \ +\texttt{#SBATCH -A hpc2n2023-132} \ +\texttt{#SBATCH -n 14} \ +\texttt{#SBATCH --time=00:05:00} \ +\texttt{##SBATCH --exclusive} \ +\texttt{#SBATCH --reservation=intro-gpu} \ +\vspace{3mm} +\texttt{module purge > /dev/null 2>\&1} \ +\texttt{ml foss/2021b} \ +\vspace{3mm} +\texttt{srun ./my_parallel_program} +\end{footnotesize} + \end{block}

+

}

+

\begin{frame}[fragile]\frametitle{The Batch System (SLURM)}\framesubtitle{Simple example, output}

+

\begin{block}{} + \justify +Example: Output from an MPI job on Kebnekaise, run on 14 cores (one NUMA island) + \end{block}

+

\begin{block}{} +\begin{tiny} +\begin{verbatim} +b-an01 [~/slurm]$ cat slurm-15952.out

+

Processor 12 of 14: Hello World! +Processor 5 of 14: Hello World! +Processor 9 of 14: Hello World! +Processor 4 of 14: Hello World! +Processor 11 of 14: Hello World! +Processor 13 of 14: Hello World! +Processor 0 of 14: Hello World! +Processor 1 of 14: Hello World! +Processor 2 of 14: Hello World! +Processor 3 of 14: Hello World! +Processor 6 of 14: Hello World! +Processor 7 of 14: Hello World! +Processor 8 of 14: Hello World! +Processor 10 of 14: Hello World! +\end{verbatim} +\end{tiny} + \end{block}

+

\end{frame}

+

\frame{\frametitle{The Batch System (SLURM)}\framesubtitle{Starting more than one serial job in the same submit file}

+

\begin{block}{} +\begin{small} +\texttt{#!/bin/bash} \ +\texttt{#SBATCH -A hpc2n2023-132} \ +\texttt{#SBATCH -n 5} \ +\texttt{#SBATCH --time=00:15:00} \ +\vspace{3mm} +\texttt{module purge > /dev/null 2>\&1} \ +\texttt{ml foss/2021b} \ +\vspace{3mm} +\texttt{srun -n 1 ./job1.batch \&} \ +\texttt{srun -n 1 ./job2.batch \&} \ +\texttt{srun -n 1 ./job3.batch \&} \ +\texttt{srun -n 1 ./job4.batch \&} \ +\texttt{srun -n 1 ./job5.batch } \ +\texttt{wait} \ +\end{small} + \end{block}

+

}

+

\frame{\frametitle{The Batch System (SLURM)}\framesubtitle{Multiple Parallel Jobs Sequentially}

+

\begin{block}{} +\begin{scriptsize} +\texttt{#!/bin/bash} \ +\texttt{#SBATCH -A hpc2n2023-132} \ +\texttt{#SBATCH -c 28} \ +\texttt{# Remember to ask for enough time for all jobs to complete} \ +\texttt{#SBATCH --time=02:00:00} \ +\vspace{3mm} +\texttt{module purge > /dev/null 2>\&1} \ +\texttt{ml foss/2021b} \ +\vspace{3mm} +\texttt{# Here 14 tasks with 2 cores per task. Output to file.} \ +\texttt{# Not needed if your job creates output in a file} \ +\texttt{# I also copy the output somewhere else and then run} \ +\texttt{# another executable…} \ +\vspace{3mm} +\texttt{srun -n 14 -c 2 ./a.out > myoutput1 2>\&1} \ +\texttt{cp myoutput1 /pfs/nobackup/home/u/username/mydatadir} \ +\texttt{srun -n 14 -c 2 ./b.out > myoutput2 2>\&1} \ +\texttt{cp myoutput2 /pfs/nobackup/home/u/username/mydatadir} \ +\texttt{srun -n 14 -c 2 ./c.out > myoutput3 2>\&1} \ +\texttt{cp myoutput3 /pfs/nobackup/home/u/username/mydatadir} \ +\end{scriptsize} + \end{block}

+

}

+

\frame{\frametitle{The Batch System (SLURM)}\framesubtitle{Multiple Parallel Jobs Simultaneously}

+
\begin{footnotesize} +Make sure you ask for enough cores that all jobs can run at the same time, and have enough memory. Of course, this will also work for serial jobs - just remove the srun from the command line. +\end{footnotesize}
+

\begin{block}{} +\begin{footnotesize} +\texttt{#!/bin/bash} \ +\texttt{#SBATCH -A hpc2n2023-132} \ +\texttt{# Total number of cores the jobs need} \ +\texttt{#SBATCH -n 56} \ +\texttt{# Remember to ask for enough time for all of the jobs to} \ +\texttt{# complete, even the longest} \ +\texttt{#SBATCH --time=02:00:00} \ +\vspace{3mm} +\texttt{module purge > /dev/null 2>\&1} \ +\texttt{ml foss/2021b} \ +\vspace{3mm} +\texttt{srun -n 14 --cpu_bind=cores ./a.out \&} \ +\texttt{srun -n 28 --cpu_bind=cores ./b.out \&} \ +\texttt{srun -n 14 --cpu_bind=cores ./c.out \&} \ +\texttt{…} \ +\texttt{wait} \ +\end{footnotesize} + \end{block}

+

}

+

\frame{\frametitle{The Batch System (SLURM)}\framesubtitle{GPU Job - V100}

+

\begin{block}{} +\begin{footnotesize} +\texttt{#!/bin/bash} \ +\texttt{#SBATCH -A hpc2n2023-132} \ +\texttt{# Expected time for job to complete} \ +\texttt{#SBATCH --time=00:10:00} \ +\texttt{# Number of GPU cards needed. Here asking for 2 V100 cards} \ +\texttt{#SBATCH --gres=v100:2} \ +\vspace{3mm} +\texttt{module purge > /dev/null 2>\&1} \ +\texttt{# Change to modules needed for your program} \ +\texttt{ml fosscuda/2021b} \ +\vspace{3mm} +\texttt{./my-cuda-program} \ +\end{footnotesize} + \end{block}

+

}

+

\frame{\frametitle{The Batch System (SLURM)}\framesubtitle{GPU Job - A100}

+

\begin{block}{} +\begin{footnotesize} +\texttt{#!/bin/bash} \ +\texttt{#SBATCH -A hpc2n2023-132} \ +\texttt{# Expected time for job to complete} \ +\texttt{#SBATCH --time=00:10:00} \ +\texttt{# Adding the partition for the A100 GPUs} \ +\texttt{#SBATCH -p amd_gpu} \ +\texttt{# Number of GPU cards needed. Here asking for 2 A100 cards} \ +\texttt{#SBATCH --gres=a100:2} \ +\vspace{3mm} +\texttt{module purge > /dev/null 2>\&1} \ +\texttt{# Change to modules needed for your software - remember to login} \ +\texttt{# to kebnekaise-amd.hpc2n.umu.se or} \ +\texttt{# kebnekaise-amd-tl.hpc2n.umu.se login node to see availability} \ +\texttt{ml CUDA/11.7.0} \ +\vspace{3mm} +\texttt{./my-cuda-program} \ +\end{footnotesize} + \end{block}

+

}

+

\frame{\frametitle{Important information}

+

\begin{block}{} + \begin{itemize} + \begin{small} + \item The course project has the following project ID: hpc2n2023-132 + \item In order to use it in a batch job, add this to the batch script: + \begin{itemize} + \begin{small} + \item #SBATCH -A hpc2n2023-132 + \end{small} + \end{itemize} + \item There is a reservation with one A100 GPU node reserved for the course, in order to let us run small GPU examples without having to wait for too long. The reservation also includes one Broadwell CPU node. + \item The reservation is ONLY valid during the course: + \begin{itemize} + \begin{small} + \item intro-gpu \ (add with #SBATCH --reservation=intro-gpu) + \end{small} + \item To use the reservation with the A100 GPU node, also add \texttt{#SBATCH -p amd_gpu} and \texttt{#SBATCH --gres=a100:x (for x=1,2)}. + \end{itemize} + \item We have a storage project linked to the compute project. It is hpc2n2023-132. You find it in /proj/nobackup/hpc2n2023-132. Remember to create your own directory under it. + \end{small} + \end{itemize} + \end{block}
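For example, one way to create your own directory under the storage project (using your username) is:

b-an01 [~]$ mkdir /proj/nobackup/hpc2n2023-132/$USER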

+

}

+

\frame{\frametitle{Questions and support}

+

\begin{block}{} + \textbf{Questions?} Now: Ask me or one of the other support or application experts present.

+
\vspace{0.5cm}
+OR 
+\vspace{0.5cm}
+
+\begin{itemize}
+\item Documentation: \texttt{https://www.hpc2n.umu.se/support}
+\item Support questions to: \texttt{https://supr.naiss.se/support/} or \texttt{support@hpc2n.umu.se}
+\end{itemize}
+
+

\end{block}

+

}

+

\end{document}

+ +
+
+ +
+
+ +
+ +
+ +
+ + + + « Previous + + + +
+ + + + + + + + + + + diff --git a/compilers/index.html b/compilers/index.html new file mode 100644 index 00000000..64012627 --- /dev/null +++ b/compilers/index.html @@ -0,0 +1,699 @@ + + + + + + + + Compiling - Introduction to Kebnekaise + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +

Compiling and Linking with Libraries

+
+

Objectives

+
    +
  • Learn about the compilers at HPC2N
      +
    • How to load the compiler toolchains
    • +
    • How to use the compilers
    • +
    • What the popular flags are
    • +
    +
  • +
  • How to link with libraries.
  • +
+
+

Installed compilers

+

There are compilers available for Fortran 77, Fortran 90, Fortran 95, C, and C++. The compilers can produce both general-purpose code and architecture-specific optimized code to improve performance (loop-level optimizations, inter-procedural analysis and cache optimizations).

+

Loading compilers

+
+

Note

+

You need to load a compiler suite (and possibly libraries, depending on what you need) before you can compile and link.

+
+

Use ml av to get a list of available compiler toolchains as mentioned in the modules - compiler toolchains section.

+

You load a compiler toolchain the same way you load any other module. They are always available directly, without the need to load prerequisites first.

+
+

Example: Loading foss/2023b

+

This compiler toolchain contains: GCC/13.2.0, OpenMPI/4.1.6, OpenBLAS/0.3.24 (with LAPACK), ScaLAPACK/2.2.0, and FFTW/3.3.10.

+
b-an01 [~]$ ml foss/2023b
+b-an01 [~]$ ml
+
+Currently Loaded Modules:
+  1) snicenvironment (S)   7) numactl/2.0.16     13) libevent/2.1.12  19) FlexiBLAS/3.3.1
+  2) systemdefault   (S)   8) XZ/5.4.4           14) UCX/1.15.0       20) FFTW/3.3.10
+  3) GCCcore/13.2.0        9) libxml2/2.11.5     15) PMIx/4.2.6       21) FFTW.MPI/3.3.10
+  4) zlib/1.2.13          10) libpciaccess/0.17  16) UCC/1.2.0        22) ScaLAPACK/2.2.0-fb
+  5) binutils/2.40        11) hwloc/2.9.2        17) OpenMPI/4.1.6    23) foss/2023b
+  6) GCC/13.2.0           12) OpenSSL/1.1        18) OpenBLAS/0.3.24
+
+  Where:
+   S:  Module is Sticky, requires --force to unload or purge
+
+
+
+b-an01 [~]$ 
+
+
+

Compiling

+

Compiling with GCC

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Language     Compiler name     MPI
Fortran77    gfortran          mpif77
Fortran90    gfortran          mpif90
Fortran95    gfortran          N/A
C            gcc               mpicc
C++          g++               mpiCC
+
+

Example: compiling hello.c

+

You can find the file hello.c in the exercises directory, in the subdirectory “simple”.
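A minimal sketch of compiling and running it after loading a GCC-based toolchain (any foss version works; 2023b is used here as in the example above):

b-an01 [~]$ ml foss/2023b
b-an01 [~]$ gcc hello.c -o hello
b-an01 [~]$ ./hello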

+
+

Compiling with Intel

Language     Compiler name     MPI
Fortran77    ifort             mpiifort
Fortran90    ifort             mpiifort
Fortran95    ifort             N/A
C            icc               mpiicc
C++          icpc              mpiicpc

+

Build environment

+
Using a compiler toolchain by itself is possible but requires a fair bit of manual work, figuring out which paths to add with -I or -L for include files and libraries, and similar.
+
+

To make life as a software builder easier there is a special module available, buildenv, that can be loaded on top of any toolchain. If it is missing for some toolchain, send a mail to support@hpc2n.umu.se and let us know.

+

This module defines a large number of environment variables with the relevant settings for the used toolchain. Among other things it sets CC, CXX, F90, FC, MPICC, MPICXX, MPIF90, CFLAGS, FFLAGS, and much more.

+

To see all of them, after loading a toolchain do:

+

ml show buildenv

+

Depending on the software, you can use these environment variables to set the corresponding makefile variables or CMake defines, or simply use them as a guide for what to put in your own makefiles.

+

Exactly how to use them depends on the software's build system.
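As an illustrative sketch (the program name is made up, and the exact variable names should be checked with ml show buildenv), compiling and linking a program against BLAS could look like:

b-an01 [~]$ ml foss/2023b
b-an01 [~]$ ml buildenv
b-an01 [~]$ $CC $CFLAGS myprog.c -o myprog $LIBBLAS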

+
\justify
+
+

\begin{small} +Figuring out how to link +\end{small} + \end{block}

+

\begin{block}{} + \begin{itemize} + \item Intel and Intel MKL linking: \ +\begin{tiny} +\texttt{https://software.intel.com/en-us/articles/intel-mkl-link-line-advisor} +\end{tiny} + \item GCC, etc. \textbf{Use buildenv} + \begin{itemize} + \item After loading a compiler toolchain, load \texttt{‘buildenv’} and use \texttt{‘ml show buildenv’} to get useful linking info + \item Example, foss (add relevant version): \ +\vspace{2mm} + \texttt{ml foss/version} \ + \texttt{ml buildenv} \ + \texttt{ml show buildenv} +\vspace{2mm} +\item Using the environment variable (prefaced with $) for linking is highly recommended! + \item You have to load the buildenv module in order to use the environment variable for linking! + \end{itemize} + \end{itemize} + \end{block} +}

+


+ +
+
+ +
+
+ +
+ +
+ +
+ + + + « Previous + + + Next » + + +
+ + + + + + + + + + + diff --git a/css/fonts/Roboto-Slab-Bold.woff b/css/fonts/Roboto-Slab-Bold.woff new file mode 100644 index 00000000..6cb60000 Binary files /dev/null and b/css/fonts/Roboto-Slab-Bold.woff differ diff --git a/css/fonts/Roboto-Slab-Bold.woff2 b/css/fonts/Roboto-Slab-Bold.woff2 new file mode 100644 index 00000000..7059e231 Binary files /dev/null and b/css/fonts/Roboto-Slab-Bold.woff2 differ diff --git a/css/fonts/Roboto-Slab-Regular.woff b/css/fonts/Roboto-Slab-Regular.woff new file mode 100644 index 00000000..f815f63f Binary files /dev/null and b/css/fonts/Roboto-Slab-Regular.woff differ diff --git a/css/fonts/Roboto-Slab-Regular.woff2 b/css/fonts/Roboto-Slab-Regular.woff2 new file mode 100644 index 00000000..f2c76e5b Binary files /dev/null and b/css/fonts/Roboto-Slab-Regular.woff2 differ diff --git a/css/fonts/fontawesome-webfont.eot b/css/fonts/fontawesome-webfont.eot new file mode 100644 index 00000000..e9f60ca9 Binary files /dev/null and b/css/fonts/fontawesome-webfont.eot differ diff --git a/css/fonts/fontawesome-webfont.svg b/css/fonts/fontawesome-webfont.svg new file mode 100644 index 00000000..855c845e --- /dev/null +++ b/css/fonts/fontawesome-webfont.svg @@ -0,0 +1,2671 @@ + + + + +Created by FontForge 20120731 at Mon Oct 24 17:37:40 2016 + By ,,, +Copyright Dave Gandy 2016. All rights reserved. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/css/fonts/fontawesome-webfont.ttf b/css/fonts/fontawesome-webfont.ttf new file mode 100644 index 00000000..35acda2f Binary files /dev/null and b/css/fonts/fontawesome-webfont.ttf differ diff --git a/css/fonts/fontawesome-webfont.woff b/css/fonts/fontawesome-webfont.woff new file mode 100644 index 00000000..400014a4 Binary files /dev/null and b/css/fonts/fontawesome-webfont.woff differ diff --git a/css/fonts/fontawesome-webfont.woff2 b/css/fonts/fontawesome-webfont.woff2 new file mode 100644 index 00000000..4d13fc60 Binary files /dev/null and b/css/fonts/fontawesome-webfont.woff2 differ diff --git a/css/fonts/lato-bold-italic.woff b/css/fonts/lato-bold-italic.woff new file mode 100644 index 00000000..88ad05b9 Binary files /dev/null and 
b/css/fonts/lato-bold-italic.woff differ diff --git a/css/fonts/lato-bold-italic.woff2 b/css/fonts/lato-bold-italic.woff2 new file mode 100644 index 00000000..c4e3d804 Binary files /dev/null and b/css/fonts/lato-bold-italic.woff2 differ diff --git a/css/fonts/lato-bold.woff b/css/fonts/lato-bold.woff new file mode 100644 index 00000000..c6dff51f Binary files /dev/null and b/css/fonts/lato-bold.woff differ diff --git a/css/fonts/lato-bold.woff2 b/css/fonts/lato-bold.woff2 new file mode 100644 index 00000000..bb195043 Binary files /dev/null and b/css/fonts/lato-bold.woff2 differ diff --git a/css/fonts/lato-normal-italic.woff b/css/fonts/lato-normal-italic.woff new file mode 100644 index 00000000..76114bc0 Binary files /dev/null and b/css/fonts/lato-normal-italic.woff differ diff --git a/css/fonts/lato-normal-italic.woff2 b/css/fonts/lato-normal-italic.woff2 new file mode 100644 index 00000000..3404f37e Binary files /dev/null and b/css/fonts/lato-normal-italic.woff2 differ diff --git a/css/fonts/lato-normal.woff b/css/fonts/lato-normal.woff new file mode 100644 index 00000000..ae1307ff Binary files /dev/null and b/css/fonts/lato-normal.woff differ diff --git a/css/fonts/lato-normal.woff2 b/css/fonts/lato-normal.woff2 new file mode 100644 index 00000000..3bf98433 Binary files /dev/null and b/css/fonts/lato-normal.woff2 differ diff --git a/css/theme.css b/css/theme.css new file mode 100644 index 00000000..ad773009 --- /dev/null +++ b/css/theme.css @@ -0,0 +1,13 @@ +/* + * This file is copied from the upstream ReadTheDocs Sphinx + * theme. To aid upgradability this file should *not* be edited. + * modifications we need should be included in theme_extra.css. + * + * https://github.com/readthedocs/sphinx_rtd_theme + */ + + /* sphinx_rtd_theme version 1.2.0 | MIT license */ +html{box-sizing:border-box}*,:after,:before{box-sizing:inherit}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}[hidden],audio:not([controls]){display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}blockquote{margin:0}dfn{font-style:italic}ins{background:#ff9;text-decoration:none}ins,mark{color:#000}mark{background:#ff0;font-style:italic;font-weight:700}.rst-content code,.rst-content tt,code,kbd,pre,samp{font-family:monospace,serif;_font-family:courier 
new,monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:after,q:before{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}dl,ol,ul{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure,form{margin:0}label{cursor:pointer}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type=button],input[type=reset],input[type=submit]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type=search]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}textarea{resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:.2em 0;background:#ccc;color:#000;padding:.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none!important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{body,html,section{background:none!important}*{box-shadow:none!important;text-shadow:none!important;filter:none!important;-ms-filter:none!important}a,a:visited{text-decoration:underline}.ir a:after,a[href^="#"]:after,a[href^="javascript:"]:after{content:""}blockquote,pre{page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}.rst-content .toctree-wrapper>p.caption,h2,h3,p{orphans:3;widows:3}.rst-content .toctree-wrapper>p.caption,h2,h3{page-break-after:avoid}}.btn,.fa:before,.icon:before,.rst-content .admonition,.rst-content .admonition-title:before,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .code-block-caption .headerlink:before,.rst-content .danger,.rst-content .eqno .headerlink:before,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-alert,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a 
button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before,input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week],select,textarea{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}/*! + * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome + * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) + */@font-face{font-family:FontAwesome;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713);src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix&v=4.7.0) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#fontawesomeregular) format("svg");font-weight:400;font-style:normal}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{display:inline-block;font:normal normal normal 14px/1 FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14286em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14286em;width:2.14286em;top:.14286em;text-align:center}.fa-li.fa-lg{left:-1.85714em}.fa-border{padding:.2em .25em .15em;border:.08em solid #eee;border-radius:.1em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa-pull-left.icon,.fa.fa-pull-left,.rst-content .code-block-caption .fa-pull-left.headerlink,.rst-content .eqno .fa-pull-left.headerlink,.rst-content .fa-pull-left.admonition-title,.rst-content code.download span.fa-pull-left:first-child,.rst-content dl dt .fa-pull-left.headerlink,.rst-content h1 .fa-pull-left.headerlink,.rst-content h2 .fa-pull-left.headerlink,.rst-content h3 .fa-pull-left.headerlink,.rst-content h4 .fa-pull-left.headerlink,.rst-content h5 .fa-pull-left.headerlink,.rst-content h6 .fa-pull-left.headerlink,.rst-content p .fa-pull-left.headerlink,.rst-content table>caption .fa-pull-left.headerlink,.rst-content tt.download span.fa-pull-left:first-child,.wy-menu-vertical li.current>a button.fa-pull-left.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-left.toctree-expand,.wy-menu-vertical li button.fa-pull-left.toctree-expand{margin-right:.3em}.fa-pull-right.icon,.fa.fa-pull-right,.rst-content 
.code-block-caption .fa-pull-right.headerlink,.rst-content .eqno .fa-pull-right.headerlink,.rst-content .fa-pull-right.admonition-title,.rst-content code.download span.fa-pull-right:first-child,.rst-content dl dt .fa-pull-right.headerlink,.rst-content h1 .fa-pull-right.headerlink,.rst-content h2 .fa-pull-right.headerlink,.rst-content h3 .fa-pull-right.headerlink,.rst-content h4 .fa-pull-right.headerlink,.rst-content h5 .fa-pull-right.headerlink,.rst-content h6 .fa-pull-right.headerlink,.rst-content p .fa-pull-right.headerlink,.rst-content table>caption .fa-pull-right.headerlink,.rst-content tt.download span.fa-pull-right:first-child,.wy-menu-vertical li.current>a button.fa-pull-right.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-right.toctree-expand,.wy-menu-vertical li button.fa-pull-right.toctree-expand{margin-left:.3em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left,.pull-left.icon,.rst-content .code-block-caption .pull-left.headerlink,.rst-content .eqno .pull-left.headerlink,.rst-content .pull-left.admonition-title,.rst-content code.download span.pull-left:first-child,.rst-content dl dt .pull-left.headerlink,.rst-content h1 .pull-left.headerlink,.rst-content h2 .pull-left.headerlink,.rst-content h3 .pull-left.headerlink,.rst-content h4 .pull-left.headerlink,.rst-content h5 .pull-left.headerlink,.rst-content h6 .pull-left.headerlink,.rst-content p .pull-left.headerlink,.rst-content table>caption .pull-left.headerlink,.rst-content tt.download span.pull-left:first-child,.wy-menu-vertical li.current>a button.pull-left.toctree-expand,.wy-menu-vertical li.on a button.pull-left.toctree-expand,.wy-menu-vertical li button.pull-left.toctree-expand{margin-right:.3em}.fa.pull-right,.pull-right.icon,.rst-content .code-block-caption .pull-right.headerlink,.rst-content .eqno .pull-right.headerlink,.rst-content .pull-right.admonition-title,.rst-content code.download span.pull-right:first-child,.rst-content dl dt .pull-right.headerlink,.rst-content h1 .pull-right.headerlink,.rst-content h2 .pull-right.headerlink,.rst-content h3 .pull-right.headerlink,.rst-content h4 .pull-right.headerlink,.rst-content h5 .pull-right.headerlink,.rst-content h6 .pull-right.headerlink,.rst-content p .pull-right.headerlink,.rst-content table>caption .pull-right.headerlink,.rst-content tt.download span.pull-right:first-child,.wy-menu-vertical li.current>a button.pull-right.toctree-expand,.wy-menu-vertical li.on a button.pull-right.toctree-expand,.wy-menu-vertical li button.pull-right.toctree-expand{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s linear infinite;animation:fa-spin 2s linear infinite}.fa-pulse{-webkit-animation:fa-spin 1s steps(8) infinite;animation:fa-spin 1s steps(8) infinite}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes 
fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scaleX(-1);-ms-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scaleY(-1);-ms-transform:scaleY(-1);transform:scaleY(-1)}:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root .fa-rotate-270{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:""}.fa-music:before{content:""}.fa-search:before,.icon-search:before{content:""}.fa-envelope-o:before{content:""}.fa-heart:before{content:""}.fa-star:before{content:""}.fa-star-o:before{content:""}.fa-user:before{content:""}.fa-film:before{content:""}.fa-th-large:before{content:""}.fa-th:before{content:""}.fa-th-list:before{content:""}.fa-check:before{content:""}.fa-close:before,.fa-remove:before,.fa-times:before{content:""}.fa-search-plus:before{content:""}.fa-search-minus:before{content:""}.fa-power-off:before{content:""}.fa-signal:before{content:""}.fa-cog:before,.fa-gear:before{content:""}.fa-trash-o:before{content:""}.fa-home:before,.icon-home:before{content:""}.fa-file-o:before{content:""}.fa-clock-o:before{content:""}.fa-road:before{content:""}.fa-download:before,.rst-content code.download span:first-child:before,.rst-content tt.download 
span:first-child:before{content:""}.fa-arrow-circle-o-down:before{content:""}.fa-arrow-circle-o-up:before{content:""}.fa-inbox:before{content:""}.fa-play-circle-o:before{content:""}.fa-repeat:before,.fa-rotate-right:before{content:""}.fa-refresh:before{content:""}.fa-list-alt:before{content:""}.fa-lock:before{content:""}.fa-flag:before{content:""}.fa-headphones:before{content:""}.fa-volume-off:before{content:""}.fa-volume-down:before{content:""}.fa-volume-up:before{content:""}.fa-qrcode:before{content:""}.fa-barcode:before{content:""}.fa-tag:before{content:""}.fa-tags:before{content:""}.fa-book:before,.icon-book:before{content:""}.fa-bookmark:before{content:""}.fa-print:before{content:""}.fa-camera:before{content:""}.fa-font:before{content:""}.fa-bold:before{content:""}.fa-italic:before{content:""}.fa-text-height:before{content:""}.fa-text-width:before{content:""}.fa-align-left:before{content:""}.fa-align-center:before{content:""}.fa-align-right:before{content:""}.fa-align-justify:before{content:""}.fa-list:before{content:""}.fa-dedent:before,.fa-outdent:before{content:""}.fa-indent:before{content:""}.fa-video-camera:before{content:""}.fa-image:before,.fa-photo:before,.fa-picture-o:before{content:""}.fa-pencil:before{content:""}.fa-map-marker:before{content:""}.fa-adjust:before{content:""}.fa-tint:before{content:""}.fa-edit:before,.fa-pencil-square-o:before{content:""}.fa-share-square-o:before{content:""}.fa-check-square-o:before{content:""}.fa-arrows:before{content:""}.fa-step-backward:before{content:""}.fa-fast-backward:before{content:""}.fa-backward:before{content:""}.fa-play:before{content:""}.fa-pause:before{content:""}.fa-stop:before{content:""}.fa-forward:before{content:""}.fa-fast-forward:before{content:""}.fa-step-forward:before{content:""}.fa-eject:before{content:""}.fa-chevron-left:before{content:""}.fa-chevron-right:before{content:""}.fa-plus-circle:before{content:""}.fa-minus-circle:before{content:""}.fa-times-circle:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before{content:""}.fa-check-circle:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before{content:""}.fa-question-circle:before{content:""}.fa-info-circle:before{content:""}.fa-crosshairs:before{content:""}.fa-times-circle-o:before{content:""}.fa-check-circle-o:before{content:""}.fa-ban:before{content:""}.fa-arrow-left:before{content:""}.fa-arrow-right:before{content:""}.fa-arrow-up:before{content:""}.fa-arrow-down:before{content:""}.fa-mail-forward:before,.fa-share:before{content:""}.fa-expand:before{content:""}.fa-compress:before{content:""}.fa-plus:before{content:""}.fa-minus:before{content:""}.fa-asterisk:before{content:""}.fa-exclamation-circle:before,.rst-content .admonition-title:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning 
.wy-input-context:before{content:""}.fa-gift:before{content:""}.fa-leaf:before{content:""}.fa-fire:before,.icon-fire:before{content:""}.fa-eye:before{content:""}.fa-eye-slash:before{content:""}.fa-exclamation-triangle:before,.fa-warning:before{content:""}.fa-plane:before{content:""}.fa-calendar:before{content:""}.fa-random:before{content:""}.fa-comment:before{content:""}.fa-magnet:before{content:""}.fa-chevron-up:before{content:""}.fa-chevron-down:before{content:""}.fa-retweet:before{content:""}.fa-shopping-cart:before{content:""}.fa-folder:before{content:""}.fa-folder-open:before{content:""}.fa-arrows-v:before{content:""}.fa-arrows-h:before{content:""}.fa-bar-chart-o:before,.fa-bar-chart:before{content:""}.fa-twitter-square:before{content:""}.fa-facebook-square:before{content:""}.fa-camera-retro:before{content:""}.fa-key:before{content:""}.fa-cogs:before,.fa-gears:before{content:""}.fa-comments:before{content:""}.fa-thumbs-o-up:before{content:""}.fa-thumbs-o-down:before{content:""}.fa-star-half:before{content:""}.fa-heart-o:before{content:""}.fa-sign-out:before{content:""}.fa-linkedin-square:before{content:""}.fa-thumb-tack:before{content:""}.fa-external-link:before{content:""}.fa-sign-in:before{content:""}.fa-trophy:before{content:""}.fa-github-square:before{content:""}.fa-upload:before{content:""}.fa-lemon-o:before{content:""}.fa-phone:before{content:""}.fa-square-o:before{content:""}.fa-bookmark-o:before{content:""}.fa-phone-square:before{content:""}.fa-twitter:before{content:""}.fa-facebook-f:before,.fa-facebook:before{content:""}.fa-github:before,.icon-github:before{content:""}.fa-unlock:before{content:""}.fa-credit-card:before{content:""}.fa-feed:before,.fa-rss:before{content:""}.fa-hdd-o:before{content:""}.fa-bullhorn:before{content:""}.fa-bell:before{content:""}.fa-certificate:before{content:""}.fa-hand-o-right:before{content:""}.fa-hand-o-left:before{content:""}.fa-hand-o-up:before{content:""}.fa-hand-o-down:before{content:""}.fa-arrow-circle-left:before,.icon-circle-arrow-left:before{content:""}.fa-arrow-circle-right:before,.icon-circle-arrow-right:before{content:""}.fa-arrow-circle-up:before{content:""}.fa-arrow-circle-down:before{content:""}.fa-globe:before{content:""}.fa-wrench:before{content:""}.fa-tasks:before{content:""}.fa-filter:before{content:""}.fa-briefcase:before{content:""}.fa-arrows-alt:before{content:""}.fa-group:before,.fa-users:before{content:""}.fa-chain:before,.fa-link:before,.icon-link:before{content:""}.fa-cloud:before{content:""}.fa-flask:before{content:""}.fa-cut:before,.fa-scissors:before{content:""}.fa-copy:before,.fa-files-o:before{content:""}.fa-paperclip:before{content:""}.fa-floppy-o:before,.fa-save:before{content:""}.fa-square:before{content:""}.fa-bars:before,.fa-navicon:before,.fa-reorder:before{content:""}.fa-list-ul:before{content:""}.fa-list-ol:before{content:""}.fa-strikethrough:before{content:""}.fa-underline:before{content:""}.fa-table:before{content:""}.fa-magic:before{content:""}.fa-truck:before{content:""}.fa-pinterest:before{content:""}.fa-pinterest-square:before{content:""}.fa-google-plus-square:before{content:""}.fa-google-plus:before{content:""}.fa-money:before{content:""}.fa-caret-down:before,.icon-caret-down:before,.wy-dropdown 
.caret:before{content:""}.fa-caret-up:before{content:""}.fa-caret-left:before{content:""}.fa-caret-right:before{content:""}.fa-columns:before{content:""}.fa-sort:before,.fa-unsorted:before{content:""}.fa-sort-desc:before,.fa-sort-down:before{content:""}.fa-sort-asc:before,.fa-sort-up:before{content:""}.fa-envelope:before{content:""}.fa-linkedin:before{content:""}.fa-rotate-left:before,.fa-undo:before{content:""}.fa-gavel:before,.fa-legal:before{content:""}.fa-dashboard:before,.fa-tachometer:before{content:""}.fa-comment-o:before{content:""}.fa-comments-o:before{content:""}.fa-bolt:before,.fa-flash:before{content:""}.fa-sitemap:before{content:""}.fa-umbrella:before{content:""}.fa-clipboard:before,.fa-paste:before{content:""}.fa-lightbulb-o:before{content:""}.fa-exchange:before{content:""}.fa-cloud-download:before{content:""}.fa-cloud-upload:before{content:""}.fa-user-md:before{content:""}.fa-stethoscope:before{content:""}.fa-suitcase:before{content:""}.fa-bell-o:before{content:""}.fa-coffee:before{content:""}.fa-cutlery:before{content:""}.fa-file-text-o:before{content:""}.fa-building-o:before{content:""}.fa-hospital-o:before{content:""}.fa-ambulance:before{content:""}.fa-medkit:before{content:""}.fa-fighter-jet:before{content:""}.fa-beer:before{content:""}.fa-h-square:before{content:""}.fa-plus-square:before{content:""}.fa-angle-double-left:before{content:""}.fa-angle-double-right:before{content:""}.fa-angle-double-up:before{content:""}.fa-angle-double-down:before{content:""}.fa-angle-left:before{content:""}.fa-angle-right:before{content:""}.fa-angle-up:before{content:""}.fa-angle-down:before{content:""}.fa-desktop:before{content:""}.fa-laptop:before{content:""}.fa-tablet:before{content:""}.fa-mobile-phone:before,.fa-mobile:before{content:""}.fa-circle-o:before{content:""}.fa-quote-left:before{content:""}.fa-quote-right:before{content:""}.fa-spinner:before{content:""}.fa-circle:before{content:""}.fa-mail-reply:before,.fa-reply:before{content:""}.fa-github-alt:before{content:""}.fa-folder-o:before{content:""}.fa-folder-open-o:before{content:""}.fa-smile-o:before{content:""}.fa-frown-o:before{content:""}.fa-meh-o:before{content:""}.fa-gamepad:before{content:""}.fa-keyboard-o:before{content:""}.fa-flag-o:before{content:""}.fa-flag-checkered:before{content:""}.fa-terminal:before{content:""}.fa-code:before{content:""}.fa-mail-reply-all:before,.fa-reply-all:before{content:""}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:""}.fa-location-arrow:before{content:""}.fa-crop:before{content:""}.fa-code-fork:before{content:""}.fa-chain-broken:before,.fa-unlink:before{content:""}.fa-question:before{content:""}.fa-info:before{content:""}.fa-exclamation:before{content:""}.fa-superscript:before{content:""}.fa-subscript:before{content:""}.fa-eraser:before{content:""}.fa-puzzle-piece:before{content:""}.fa-microphone:before{content:""}.fa-microphone-slash:before{content:""}.fa-shield:before{content:""}.fa-calendar-o:before{content:""}.fa-fire-extinguisher:before{content:""}.fa-rocket:before{content:""}.fa-maxcdn:before{content:""}.fa-chevron-circle-left:before{content:""}.fa-chevron-circle-right:before{content:""}.fa-chevron-circle-up:before{content:""}.fa-chevron-circle-down:before{content:""}.fa-html5:before{content:""}.fa-css3:before{content:""}.fa-anchor:before{content:""}.fa-unlock-alt:before{content:""}.fa-bullseye:before{content:""}.fa-ellipsis-h:before{content:""}.fa-elli
psis-v:before{content:""}.fa-rss-square:before{content:""}.fa-play-circle:before{content:""}.fa-ticket:before{content:""}.fa-minus-square:before{content:""}.fa-minus-square-o:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before{content:""}.fa-level-up:before{content:""}.fa-level-down:before{content:""}.fa-check-square:before{content:""}.fa-pencil-square:before{content:""}.fa-external-link-square:before{content:""}.fa-share-square:before{content:""}.fa-compass:before{content:""}.fa-caret-square-o-down:before,.fa-toggle-down:before{content:""}.fa-caret-square-o-up:before,.fa-toggle-up:before{content:""}.fa-caret-square-o-right:before,.fa-toggle-right:before{content:""}.fa-eur:before,.fa-euro:before{content:""}.fa-gbp:before{content:""}.fa-dollar:before,.fa-usd:before{content:""}.fa-inr:before,.fa-rupee:before{content:""}.fa-cny:before,.fa-jpy:before,.fa-rmb:before,.fa-yen:before{content:""}.fa-rouble:before,.fa-rub:before,.fa-ruble:before{content:""}.fa-krw:before,.fa-won:before{content:""}.fa-bitcoin:before,.fa-btc:before{content:""}.fa-file:before{content:""}.fa-file-text:before{content:""}.fa-sort-alpha-asc:before{content:""}.fa-sort-alpha-desc:before{content:""}.fa-sort-amount-asc:before{content:""}.fa-sort-amount-desc:before{content:""}.fa-sort-numeric-asc:before{content:""}.fa-sort-numeric-desc:before{content:""}.fa-thumbs-up:before{content:""}.fa-thumbs-down:before{content:""}.fa-youtube-square:before{content:""}.fa-youtube:before{content:""}.fa-xing:before{content:""}.fa-xing-square:before{content:""}.fa-youtube-play:before{content:""}.fa-dropbox:before{content:""}.fa-stack-overflow:before{content:""}.fa-instagram:before{content:""}.fa-flickr:before{content:""}.fa-adn:before{content:""}.fa-bitbucket:before,.icon-bitbucket:before{content:""}.fa-bitbucket-square:before{content:""}.fa-tumblr:before{content:""}.fa-tumblr-square:before{content:""}.fa-long-arrow-down:before{content:""}.fa-long-arrow-up:before{content:""}.fa-long-arrow-left:before{content:""}.fa-long-arrow-right:before{content:""}.fa-apple:before{content:""}.fa-windows:before{content:""}.fa-android:before{content:""}.fa-linux:before{content:""}.fa-dribbble:before{content:""}.fa-skype:before{content:""}.fa-foursquare:before{content:""}.fa-trello:before{content:""}.fa-female:before{content:""}.fa-male:before{content:""}.fa-gittip:before,.fa-gratipay:before{content:""}.fa-sun-o:before{content:""}.fa-moon-o:before{content:""}.fa-archive:before{content:""}.fa-bug:before{content:""}.fa-vk:before{content:""}.fa-weibo:before{content:""}.fa-renren:before{content:""}.fa-pagelines:before{content:""}.fa-stack-exchange:before{content:""}.fa-arrow-circle-o-right:before{content:""}.fa-arrow-circle-o-left:before{content:""}.fa-caret-square-o-left:before,.fa-toggle-left:before{content:""}.fa-dot-circle-o:before{content:""}.fa-wheelchair:before{content:""}.fa-vimeo-square:before{content:""}.fa-try:before,.fa-turkish-lira:before{content:""}.fa-plus-square-o:before,.wy-menu-vertical li 
button.toctree-expand:before{content:""}.fa-space-shuttle:before{content:""}.fa-slack:before{content:""}.fa-envelope-square:before{content:""}.fa-wordpress:before{content:""}.fa-openid:before{content:""}.fa-bank:before,.fa-institution:before,.fa-university:before{content:""}.fa-graduation-cap:before,.fa-mortar-board:before{content:""}.fa-yahoo:before{content:""}.fa-google:before{content:""}.fa-reddit:before{content:""}.fa-reddit-square:before{content:""}.fa-stumbleupon-circle:before{content:""}.fa-stumbleupon:before{content:""}.fa-delicious:before{content:""}.fa-digg:before{content:""}.fa-pied-piper-pp:before{content:""}.fa-pied-piper-alt:before{content:""}.fa-drupal:before{content:""}.fa-joomla:before{content:""}.fa-language:before{content:""}.fa-fax:before{content:""}.fa-building:before{content:""}.fa-child:before{content:""}.fa-paw:before{content:""}.fa-spoon:before{content:""}.fa-cube:before{content:""}.fa-cubes:before{content:""}.fa-behance:before{content:""}.fa-behance-square:before{content:""}.fa-steam:before{content:""}.fa-steam-square:before{content:""}.fa-recycle:before{content:""}.fa-automobile:before,.fa-car:before{content:""}.fa-cab:before,.fa-taxi:before{content:""}.fa-tree:before{content:""}.fa-spotify:before{content:""}.fa-deviantart:before{content:""}.fa-soundcloud:before{content:""}.fa-database:before{content:""}.fa-file-pdf-o:before{content:""}.fa-file-word-o:before{content:""}.fa-file-excel-o:before{content:""}.fa-file-powerpoint-o:before{content:""}.fa-file-image-o:before,.fa-file-photo-o:before,.fa-file-picture-o:before{content:""}.fa-file-archive-o:before,.fa-file-zip-o:before{content:""}.fa-file-audio-o:before,.fa-file-sound-o:before{content:""}.fa-file-movie-o:before,.fa-file-video-o:before{content:""}.fa-file-code-o:before{content:""}.fa-vine:before{content:""}.fa-codepen:before{content:""}.fa-jsfiddle:before{content:""}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-ring:before,.fa-life-saver:before,.fa-support:before{content:""}.fa-circle-o-notch:before{content:""}.fa-ra:before,.fa-rebel:before,.fa-resistance:before{content:""}.fa-empire:before,.fa-ge:before{content:""}.fa-git-square:before{content:""}.fa-git:before{content:""}.fa-hacker-news:before,.fa-y-combinator-square:before,.fa-yc-square:before{content:""}.fa-tencent-weibo:before{content:""}.fa-qq:before{content:""}.fa-wechat:before,.fa-weixin:before{content:""}.fa-paper-plane:before,.fa-send:before{content:""}.fa-paper-plane-o:before,.fa-send-o:before{content:""}.fa-history:before{content:""}.fa-circle-thin:before{content:""}.fa-header:before{content:""}.fa-paragraph:before{content:""}.fa-sliders:before{content:""}.fa-share-alt:before{content:""}.fa-share-alt-square:before{content:""}.fa-bomb:before{content:""}.fa-futbol-o:before,.fa-soccer-ball-o:before{content:""}.fa-tty:before{content:""}.fa-binoculars:before{content:""}.fa-plug:before{content:""}.fa-slideshare:before{content:""}.fa-twitch:before{content:""}.fa-yelp:before{content:""}.fa-newspaper-o:before{content:""}.fa-wifi:before{content:""}.fa-calculator:before{content:""}.fa-paypal:before{content:""}.fa-google-wallet:before{content:""}.fa-cc-visa:before{content:""}.fa-cc-mastercard:before{content:""}.fa-cc-discover:before{content:""}.fa-cc-amex:before{content:""}.fa-cc-paypal:before{content:""}.fa-cc-stripe:before{content:""}.fa-bell-slash:before{content:""}.fa-bell-slash-o:before{content:""}.fa-trash:before{content:""}.fa-copyright:before{content:""}.f
a-at:before{content:""}.fa-eyedropper:before{content:""}.fa-paint-brush:before{content:""}.fa-birthday-cake:before{content:""}.fa-area-chart:before{content:""}.fa-pie-chart:before{content:""}.fa-line-chart:before{content:""}.fa-lastfm:before{content:""}.fa-lastfm-square:before{content:""}.fa-toggle-off:before{content:""}.fa-toggle-on:before{content:""}.fa-bicycle:before{content:""}.fa-bus:before{content:""}.fa-ioxhost:before{content:""}.fa-angellist:before{content:""}.fa-cc:before{content:""}.fa-ils:before,.fa-shekel:before,.fa-sheqel:before{content:""}.fa-meanpath:before{content:""}.fa-buysellads:before{content:""}.fa-connectdevelop:before{content:""}.fa-dashcube:before{content:""}.fa-forumbee:before{content:""}.fa-leanpub:before{content:""}.fa-sellsy:before{content:""}.fa-shirtsinbulk:before{content:""}.fa-simplybuilt:before{content:""}.fa-skyatlas:before{content:""}.fa-cart-plus:before{content:""}.fa-cart-arrow-down:before{content:""}.fa-diamond:before{content:""}.fa-ship:before{content:""}.fa-user-secret:before{content:""}.fa-motorcycle:before{content:""}.fa-street-view:before{content:""}.fa-heartbeat:before{content:""}.fa-venus:before{content:""}.fa-mars:before{content:""}.fa-mercury:before{content:""}.fa-intersex:before,.fa-transgender:before{content:""}.fa-transgender-alt:before{content:""}.fa-venus-double:before{content:""}.fa-mars-double:before{content:""}.fa-venus-mars:before{content:""}.fa-mars-stroke:before{content:""}.fa-mars-stroke-v:before{content:""}.fa-mars-stroke-h:before{content:""}.fa-neuter:before{content:""}.fa-genderless:before{content:""}.fa-facebook-official:before{content:""}.fa-pinterest-p:before{content:""}.fa-whatsapp:before{content:""}.fa-server:before{content:""}.fa-user-plus:before{content:""}.fa-user-times:before{content:""}.fa-bed:before,.fa-hotel:before{content:""}.fa-viacoin:before{content:""}.fa-train:before{content:""}.fa-subway:before{content:""}.fa-medium:before{content:""}.fa-y-combinator:before,.fa-yc:before{content:""}.fa-optin-monster:before{content:""}.fa-opencart:before{content:""}.fa-expeditedssl:before{content:""}.fa-battery-4:before,.fa-battery-full:before,.fa-battery:before{content:""}.fa-battery-3:before,.fa-battery-three-quarters:before{content:""}.fa-battery-2:before,.fa-battery-half:before{content:""}.fa-battery-1:before,.fa-battery-quarter:before{content:""}.fa-battery-0:before,.fa-battery-empty:before{content:""}.fa-mouse-pointer:before{content:""}.fa-i-cursor:before{content:""}.fa-object-group:before{content:""}.fa-object-ungroup:before{content:""}.fa-sticky-note:before{content:""}.fa-sticky-note-o:before{content:""}.fa-cc-jcb:before{content:""}.fa-cc-diners-club:before{content:""}.fa-clone:before{content:""}.fa-balance-scale:before{content:""}.fa-hourglass-o:before{content:""}.fa-hourglass-1:before,.fa-hourglass-start:before{content:""}.fa-hourglass-2:before,.fa-hourglass-half:before{content:""}.fa-hourglass-3:before,.fa-hourglass-end:before{content:""}.fa-hourglass:before{content:""}.fa-hand-grab-o:before,.fa-hand-rock-o:before{content:""}.fa-hand-paper-o:before,.fa-hand-stop-o:before{content:""}.fa-hand-scissors-o:before{content:""}.fa-hand-lizard-o:before{content:""}.fa-hand-spock-o:before{content:""}.fa-hand-pointer-o:before{content:""}.fa-hand-peace-o:before{content:""}.fa-trademark:before{content:""}.fa-registered:before{content:""}.fa-creative-commons:before{content:""}.fa-gg:before{content:""}.fa-gg-circle:before{content:""}.fa-trip
advisor:before{content:""}.fa-odnoklassniki:before{content:""}.fa-odnoklassniki-square:before{content:""}.fa-get-pocket:before{content:""}.fa-wikipedia-w:before{content:""}.fa-safari:before{content:""}.fa-chrome:before{content:""}.fa-firefox:before{content:""}.fa-opera:before{content:""}.fa-internet-explorer:before{content:""}.fa-television:before,.fa-tv:before{content:""}.fa-contao:before{content:""}.fa-500px:before{content:""}.fa-amazon:before{content:""}.fa-calendar-plus-o:before{content:""}.fa-calendar-minus-o:before{content:""}.fa-calendar-times-o:before{content:""}.fa-calendar-check-o:before{content:""}.fa-industry:before{content:""}.fa-map-pin:before{content:""}.fa-map-signs:before{content:""}.fa-map-o:before{content:""}.fa-map:before{content:""}.fa-commenting:before{content:""}.fa-commenting-o:before{content:""}.fa-houzz:before{content:""}.fa-vimeo:before{content:""}.fa-black-tie:before{content:""}.fa-fonticons:before{content:""}.fa-reddit-alien:before{content:""}.fa-edge:before{content:""}.fa-credit-card-alt:before{content:""}.fa-codiepie:before{content:""}.fa-modx:before{content:""}.fa-fort-awesome:before{content:""}.fa-usb:before{content:""}.fa-product-hunt:before{content:""}.fa-mixcloud:before{content:""}.fa-scribd:before{content:""}.fa-pause-circle:before{content:""}.fa-pause-circle-o:before{content:""}.fa-stop-circle:before{content:""}.fa-stop-circle-o:before{content:""}.fa-shopping-bag:before{content:""}.fa-shopping-basket:before{content:""}.fa-hashtag:before{content:""}.fa-bluetooth:before{content:""}.fa-bluetooth-b:before{content:""}.fa-percent:before{content:""}.fa-gitlab:before,.icon-gitlab:before{content:""}.fa-wpbeginner:before{content:""}.fa-wpforms:before{content:""}.fa-envira:before{content:""}.fa-universal-access:before{content:""}.fa-wheelchair-alt:before{content:""}.fa-question-circle-o:before{content:""}.fa-blind:before{content:""}.fa-audio-description:before{content:""}.fa-volume-control-phone:before{content:""}.fa-braille:before{content:""}.fa-assistive-listening-systems:before{content:""}.fa-american-sign-language-interpreting:before,.fa-asl-interpreting:before{content:""}.fa-deaf:before,.fa-deafness:before,.fa-hard-of-hearing:before{content:""}.fa-glide:before{content:""}.fa-glide-g:before{content:""}.fa-sign-language:before,.fa-signing:before{content:""}.fa-low-vision:before{content:""}.fa-viadeo:before{content:""}.fa-viadeo-square:before{content:""}.fa-snapchat:before{content:""}.fa-snapchat-ghost:before{content:""}.fa-snapchat-square:before{content:""}.fa-pied-piper:before{content:""}.fa-first-order:before{content:""}.fa-yoast:before{content:""}.fa-themeisle:before{content:""}.fa-google-plus-circle:before,.fa-google-plus-official:before{content:""}.fa-fa:before,.fa-font-awesome:before{content:""}.fa-handshake-o:before{content:""}.fa-envelope-open:before{content:""}.fa-envelope-open-o:before{content:""}.fa-linode:before{content:""}.fa-address-book:before{content:""}.fa-address-book-o:before{content:""}.fa-address-card:before,.fa-vcard:before{content:""}.fa-address-card-o:before,.fa-vcard-o:before{content:""}.fa-user-circle:before{content:""}.fa-user-circle-o:before{content:""}.fa-user-o:before{content:""}.fa-id-badge:before{content:""}.fa-drivers-license:before,.fa-id-card:before{content:""}.fa-drivers-license-o:before,.fa-id-card-o:before{content:""}.fa-quora:before{content:""}.fa-free-code-camp:before{content:""}.fa-telegram:before{content:""}.fa-thermometer-4:b
efore,.fa-thermometer-full:before,.fa-thermometer:before{content:""}.fa-thermometer-3:before,.fa-thermometer-three-quarters:before{content:""}.fa-thermometer-2:before,.fa-thermometer-half:before{content:""}.fa-thermometer-1:before,.fa-thermometer-quarter:before{content:""}.fa-thermometer-0:before,.fa-thermometer-empty:before{content:""}.fa-shower:before{content:""}.fa-bath:before,.fa-bathtub:before,.fa-s15:before{content:""}.fa-podcast:before{content:""}.fa-window-maximize:before{content:""}.fa-window-minimize:before{content:""}.fa-window-restore:before{content:""}.fa-times-rectangle:before,.fa-window-close:before{content:""}.fa-times-rectangle-o:before,.fa-window-close-o:before{content:""}.fa-bandcamp:before{content:""}.fa-grav:before{content:""}.fa-etsy:before{content:""}.fa-imdb:before{content:""}.fa-ravelry:before{content:""}.fa-eercast:before{content:""}.fa-microchip:before{content:""}.fa-snowflake-o:before{content:""}.fa-superpowers:before{content:""}.fa-wpexplorer:before{content:""}.fa-meetup:before{content:""}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-dropdown .caret,.wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{font-family:inherit}.fa:before,.icon:before,.rst-content .admonition-title:before,.rst-content .code-block-caption .headerlink:before,.rst-content .eqno .headerlink:before,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before{font-family:FontAwesome;display:inline-block;font-style:normal;font-weight:400;line-height:1;text-decoration:inherit}.rst-content .code-block-caption a .headerlink,.rst-content .eqno a .headerlink,.rst-content a 
.admonition-title,.rst-content code.download a span:first-child,.rst-content dl dt a .headerlink,.rst-content h1 a .headerlink,.rst-content h2 a .headerlink,.rst-content h3 a .headerlink,.rst-content h4 a .headerlink,.rst-content h5 a .headerlink,.rst-content h6 a .headerlink,.rst-content p.caption a .headerlink,.rst-content p a .headerlink,.rst-content table>caption a .headerlink,.rst-content tt.download a span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li a button.toctree-expand,a .fa,a .icon,a .rst-content .admonition-title,a .rst-content .code-block-caption .headerlink,a .rst-content .eqno .headerlink,a .rst-content code.download span:first-child,a .rst-content dl dt .headerlink,a .rst-content h1 .headerlink,a .rst-content h2 .headerlink,a .rst-content h3 .headerlink,a .rst-content h4 .headerlink,a .rst-content h5 .headerlink,a .rst-content h6 .headerlink,a .rst-content p.caption .headerlink,a .rst-content p .headerlink,a .rst-content table>caption .headerlink,a .rst-content tt.download span:first-child,a .wy-menu-vertical li button.toctree-expand{display:inline-block;text-decoration:inherit}.btn .fa,.btn .icon,.btn .rst-content .admonition-title,.btn .rst-content .code-block-caption .headerlink,.btn .rst-content .eqno .headerlink,.btn .rst-content code.download span:first-child,.btn .rst-content dl dt .headerlink,.btn .rst-content h1 .headerlink,.btn .rst-content h2 .headerlink,.btn .rst-content h3 .headerlink,.btn .rst-content h4 .headerlink,.btn .rst-content h5 .headerlink,.btn .rst-content h6 .headerlink,.btn .rst-content p .headerlink,.btn .rst-content table>caption .headerlink,.btn .rst-content tt.download span:first-child,.btn .wy-menu-vertical li.current>a button.toctree-expand,.btn .wy-menu-vertical li.on a button.toctree-expand,.btn .wy-menu-vertical li button.toctree-expand,.nav .fa,.nav .icon,.nav .rst-content .admonition-title,.nav .rst-content .code-block-caption .headerlink,.nav .rst-content .eqno .headerlink,.nav .rst-content code.download span:first-child,.nav .rst-content dl dt .headerlink,.nav .rst-content h1 .headerlink,.nav .rst-content h2 .headerlink,.nav .rst-content h3 .headerlink,.nav .rst-content h4 .headerlink,.nav .rst-content h5 .headerlink,.nav .rst-content h6 .headerlink,.nav .rst-content p .headerlink,.nav .rst-content table>caption .headerlink,.nav .rst-content tt.download span:first-child,.nav .wy-menu-vertical li.current>a button.toctree-expand,.nav .wy-menu-vertical li.on a button.toctree-expand,.nav .wy-menu-vertical li button.toctree-expand,.rst-content .btn .admonition-title,.rst-content .code-block-caption .btn .headerlink,.rst-content .code-block-caption .nav .headerlink,.rst-content .eqno .btn .headerlink,.rst-content .eqno .nav .headerlink,.rst-content .nav .admonition-title,.rst-content code.download .btn span:first-child,.rst-content code.download .nav span:first-child,.rst-content dl dt .btn .headerlink,.rst-content dl dt .nav .headerlink,.rst-content h1 .btn .headerlink,.rst-content h1 .nav .headerlink,.rst-content h2 .btn .headerlink,.rst-content h2 .nav .headerlink,.rst-content h3 .btn .headerlink,.rst-content h3 .nav .headerlink,.rst-content h4 .btn .headerlink,.rst-content h4 .nav .headerlink,.rst-content h5 .btn .headerlink,.rst-content h5 .nav .headerlink,.rst-content h6 .btn .headerlink,.rst-content h6 .nav .headerlink,.rst-content p .btn .headerlink,.rst-content p .nav .headerlink,.rst-content table>caption .btn .headerlink,.rst-content 
table>caption .nav .headerlink,.rst-content tt.download .btn span:first-child,.rst-content tt.download .nav span:first-child,.wy-menu-vertical li .btn button.toctree-expand,.wy-menu-vertical li.current>a .btn button.toctree-expand,.wy-menu-vertical li.current>a .nav button.toctree-expand,.wy-menu-vertical li .nav button.toctree-expand,.wy-menu-vertical li.on a .btn button.toctree-expand,.wy-menu-vertical li.on a .nav button.toctree-expand{display:inline}.btn .fa-large.icon,.btn .fa.fa-large,.btn .rst-content .code-block-caption .fa-large.headerlink,.btn .rst-content .eqno .fa-large.headerlink,.btn .rst-content .fa-large.admonition-title,.btn .rst-content code.download span.fa-large:first-child,.btn .rst-content dl dt .fa-large.headerlink,.btn .rst-content h1 .fa-large.headerlink,.btn .rst-content h2 .fa-large.headerlink,.btn .rst-content h3 .fa-large.headerlink,.btn .rst-content h4 .fa-large.headerlink,.btn .rst-content h5 .fa-large.headerlink,.btn .rst-content h6 .fa-large.headerlink,.btn .rst-content p .fa-large.headerlink,.btn .rst-content table>caption .fa-large.headerlink,.btn .rst-content tt.download span.fa-large:first-child,.btn .wy-menu-vertical li button.fa-large.toctree-expand,.nav .fa-large.icon,.nav .fa.fa-large,.nav .rst-content .code-block-caption .fa-large.headerlink,.nav .rst-content .eqno .fa-large.headerlink,.nav .rst-content .fa-large.admonition-title,.nav .rst-content code.download span.fa-large:first-child,.nav .rst-content dl dt .fa-large.headerlink,.nav .rst-content h1 .fa-large.headerlink,.nav .rst-content h2 .fa-large.headerlink,.nav .rst-content h3 .fa-large.headerlink,.nav .rst-content h4 .fa-large.headerlink,.nav .rst-content h5 .fa-large.headerlink,.nav .rst-content h6 .fa-large.headerlink,.nav .rst-content p .fa-large.headerlink,.nav .rst-content table>caption .fa-large.headerlink,.nav .rst-content tt.download span.fa-large:first-child,.nav .wy-menu-vertical li button.fa-large.toctree-expand,.rst-content .btn .fa-large.admonition-title,.rst-content .code-block-caption .btn .fa-large.headerlink,.rst-content .code-block-caption .nav .fa-large.headerlink,.rst-content .eqno .btn .fa-large.headerlink,.rst-content .eqno .nav .fa-large.headerlink,.rst-content .nav .fa-large.admonition-title,.rst-content code.download .btn span.fa-large:first-child,.rst-content code.download .nav span.fa-large:first-child,.rst-content dl dt .btn .fa-large.headerlink,.rst-content dl dt .nav .fa-large.headerlink,.rst-content h1 .btn .fa-large.headerlink,.rst-content h1 .nav .fa-large.headerlink,.rst-content h2 .btn .fa-large.headerlink,.rst-content h2 .nav .fa-large.headerlink,.rst-content h3 .btn .fa-large.headerlink,.rst-content h3 .nav .fa-large.headerlink,.rst-content h4 .btn .fa-large.headerlink,.rst-content h4 .nav .fa-large.headerlink,.rst-content h5 .btn .fa-large.headerlink,.rst-content h5 .nav .fa-large.headerlink,.rst-content h6 .btn .fa-large.headerlink,.rst-content h6 .nav .fa-large.headerlink,.rst-content p .btn .fa-large.headerlink,.rst-content p .nav .fa-large.headerlink,.rst-content table>caption .btn .fa-large.headerlink,.rst-content table>caption .nav .fa-large.headerlink,.rst-content tt.download .btn span.fa-large:first-child,.rst-content tt.download .nav span.fa-large:first-child,.wy-menu-vertical li .btn button.fa-large.toctree-expand,.wy-menu-vertical li .nav button.fa-large.toctree-expand{line-height:.9em}.btn .fa-spin.icon,.btn .fa.fa-spin,.btn .rst-content .code-block-caption .fa-spin.headerlink,.btn .rst-content .eqno .fa-spin.headerlink,.btn .rst-content 
.fa-spin.admonition-title,.btn .rst-content code.download span.fa-spin:first-child,.btn .rst-content dl dt .fa-spin.headerlink,.btn .rst-content h1 .fa-spin.headerlink,.btn .rst-content h2 .fa-spin.headerlink,.btn .rst-content h3 .fa-spin.headerlink,.btn .rst-content h4 .fa-spin.headerlink,.btn .rst-content h5 .fa-spin.headerlink,.btn .rst-content h6 .fa-spin.headerlink,.btn .rst-content p .fa-spin.headerlink,.btn .rst-content table>caption .fa-spin.headerlink,.btn .rst-content tt.download span.fa-spin:first-child,.btn .wy-menu-vertical li button.fa-spin.toctree-expand,.nav .fa-spin.icon,.nav .fa.fa-spin,.nav .rst-content .code-block-caption .fa-spin.headerlink,.nav .rst-content .eqno .fa-spin.headerlink,.nav .rst-content .fa-spin.admonition-title,.nav .rst-content code.download span.fa-spin:first-child,.nav .rst-content dl dt .fa-spin.headerlink,.nav .rst-content h1 .fa-spin.headerlink,.nav .rst-content h2 .fa-spin.headerlink,.nav .rst-content h3 .fa-spin.headerlink,.nav .rst-content h4 .fa-spin.headerlink,.nav .rst-content h5 .fa-spin.headerlink,.nav .rst-content h6 .fa-spin.headerlink,.nav .rst-content p .fa-spin.headerlink,.nav .rst-content table>caption .fa-spin.headerlink,.nav .rst-content tt.download span.fa-spin:first-child,.nav .wy-menu-vertical li button.fa-spin.toctree-expand,.rst-content .btn .fa-spin.admonition-title,.rst-content .code-block-caption .btn .fa-spin.headerlink,.rst-content .code-block-caption .nav .fa-spin.headerlink,.rst-content .eqno .btn .fa-spin.headerlink,.rst-content .eqno .nav .fa-spin.headerlink,.rst-content .nav .fa-spin.admonition-title,.rst-content code.download .btn span.fa-spin:first-child,.rst-content code.download .nav span.fa-spin:first-child,.rst-content dl dt .btn .fa-spin.headerlink,.rst-content dl dt .nav .fa-spin.headerlink,.rst-content h1 .btn .fa-spin.headerlink,.rst-content h1 .nav .fa-spin.headerlink,.rst-content h2 .btn .fa-spin.headerlink,.rst-content h2 .nav .fa-spin.headerlink,.rst-content h3 .btn .fa-spin.headerlink,.rst-content h3 .nav .fa-spin.headerlink,.rst-content h4 .btn .fa-spin.headerlink,.rst-content h4 .nav .fa-spin.headerlink,.rst-content h5 .btn .fa-spin.headerlink,.rst-content h5 .nav .fa-spin.headerlink,.rst-content h6 .btn .fa-spin.headerlink,.rst-content h6 .nav .fa-spin.headerlink,.rst-content p .btn .fa-spin.headerlink,.rst-content p .nav .fa-spin.headerlink,.rst-content table>caption .btn .fa-spin.headerlink,.rst-content table>caption .nav .fa-spin.headerlink,.rst-content tt.download .btn span.fa-spin:first-child,.rst-content tt.download .nav span.fa-spin:first-child,.wy-menu-vertical li .btn button.fa-spin.toctree-expand,.wy-menu-vertical li .nav button.fa-spin.toctree-expand{display:inline-block}.btn.fa:before,.btn.icon:before,.rst-content .btn.admonition-title:before,.rst-content .code-block-caption .btn.headerlink:before,.rst-content .eqno .btn.headerlink:before,.rst-content code.download span.btn:first-child:before,.rst-content dl dt .btn.headerlink:before,.rst-content h1 .btn.headerlink:before,.rst-content h2 .btn.headerlink:before,.rst-content h3 .btn.headerlink:before,.rst-content h4 .btn.headerlink:before,.rst-content h5 .btn.headerlink:before,.rst-content h6 .btn.headerlink:before,.rst-content p .btn.headerlink:before,.rst-content table>caption .btn.headerlink:before,.rst-content tt.download span.btn:first-child:before,.wy-menu-vertical li button.btn.toctree-expand:before{opacity:.5;-webkit-transition:opacity .05s ease-in;-moz-transition:opacity .05s ease-in;transition:opacity .05s 
ease-in}.btn.fa:hover:before,.btn.icon:hover:before,.rst-content .btn.admonition-title:hover:before,.rst-content .code-block-caption .btn.headerlink:hover:before,.rst-content .eqno .btn.headerlink:hover:before,.rst-content code.download span.btn:first-child:hover:before,.rst-content dl dt .btn.headerlink:hover:before,.rst-content h1 .btn.headerlink:hover:before,.rst-content h2 .btn.headerlink:hover:before,.rst-content h3 .btn.headerlink:hover:before,.rst-content h4 .btn.headerlink:hover:before,.rst-content h5 .btn.headerlink:hover:before,.rst-content h6 .btn.headerlink:hover:before,.rst-content p .btn.headerlink:hover:before,.rst-content table>caption .btn.headerlink:hover:before,.rst-content tt.download span.btn:first-child:hover:before,.wy-menu-vertical li button.btn.toctree-expand:hover:before{opacity:1}.btn-mini .fa:before,.btn-mini .icon:before,.btn-mini .rst-content .admonition-title:before,.btn-mini .rst-content .code-block-caption .headerlink:before,.btn-mini .rst-content .eqno .headerlink:before,.btn-mini .rst-content code.download span:first-child:before,.btn-mini .rst-content dl dt .headerlink:before,.btn-mini .rst-content h1 .headerlink:before,.btn-mini .rst-content h2 .headerlink:before,.btn-mini .rst-content h3 .headerlink:before,.btn-mini .rst-content h4 .headerlink:before,.btn-mini .rst-content h5 .headerlink:before,.btn-mini .rst-content h6 .headerlink:before,.btn-mini .rst-content p .headerlink:before,.btn-mini .rst-content table>caption .headerlink:before,.btn-mini .rst-content tt.download span:first-child:before,.btn-mini .wy-menu-vertical li button.toctree-expand:before,.rst-content .btn-mini .admonition-title:before,.rst-content .code-block-caption .btn-mini .headerlink:before,.rst-content .eqno .btn-mini .headerlink:before,.rst-content code.download .btn-mini span:first-child:before,.rst-content dl dt .btn-mini .headerlink:before,.rst-content h1 .btn-mini .headerlink:before,.rst-content h2 .btn-mini .headerlink:before,.rst-content h3 .btn-mini .headerlink:before,.rst-content h4 .btn-mini .headerlink:before,.rst-content h5 .btn-mini .headerlink:before,.rst-content h6 .btn-mini .headerlink:before,.rst-content p .btn-mini .headerlink:before,.rst-content table>caption .btn-mini .headerlink:before,.rst-content tt.download .btn-mini span:first-child:before,.wy-menu-vertical li .btn-mini button.toctree-expand:before{font-size:14px;vertical-align:-15%}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.wy-alert{padding:12px;line-height:24px;margin-bottom:24px;background:#e7f2fa}.rst-content .admonition-title,.wy-alert-title{font-weight:700;display:block;color:#fff;background:#6ab0de;padding:6px 12px;margin:-12px -12px 12px}.rst-content .danger,.rst-content .error,.rst-content .wy-alert-danger.admonition,.rst-content .wy-alert-danger.admonition-todo,.rst-content .wy-alert-danger.attention,.rst-content .wy-alert-danger.caution,.rst-content .wy-alert-danger.hint,.rst-content .wy-alert-danger.important,.rst-content .wy-alert-danger.note,.rst-content .wy-alert-danger.seealso,.rst-content .wy-alert-danger.tip,.rst-content .wy-alert-danger.warning,.wy-alert.wy-alert-danger{background:#fdf3f2}.rst-content .danger .admonition-title,.rst-content .danger .wy-alert-title,.rst-content .error .admonition-title,.rst-content .error .wy-alert-title,.rst-content 
.wy-alert-danger.admonition-todo .admonition-title,.rst-content .wy-alert-danger.admonition-todo .wy-alert-title,.rst-content .wy-alert-danger.admonition .admonition-title,.rst-content .wy-alert-danger.admonition .wy-alert-title,.rst-content .wy-alert-danger.attention .admonition-title,.rst-content .wy-alert-danger.attention .wy-alert-title,.rst-content .wy-alert-danger.caution .admonition-title,.rst-content .wy-alert-danger.caution .wy-alert-title,.rst-content .wy-alert-danger.hint .admonition-title,.rst-content .wy-alert-danger.hint .wy-alert-title,.rst-content .wy-alert-danger.important .admonition-title,.rst-content .wy-alert-danger.important .wy-alert-title,.rst-content .wy-alert-danger.note .admonition-title,.rst-content .wy-alert-danger.note .wy-alert-title,.rst-content .wy-alert-danger.seealso .admonition-title,.rst-content .wy-alert-danger.seealso .wy-alert-title,.rst-content .wy-alert-danger.tip .admonition-title,.rst-content .wy-alert-danger.tip .wy-alert-title,.rst-content .wy-alert-danger.warning .admonition-title,.rst-content .wy-alert-danger.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-danger .admonition-title,.wy-alert.wy-alert-danger .rst-content .admonition-title,.wy-alert.wy-alert-danger .wy-alert-title{background:#f29f97}.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .warning,.rst-content .wy-alert-warning.admonition,.rst-content .wy-alert-warning.danger,.rst-content .wy-alert-warning.error,.rst-content .wy-alert-warning.hint,.rst-content .wy-alert-warning.important,.rst-content .wy-alert-warning.note,.rst-content .wy-alert-warning.seealso,.rst-content .wy-alert-warning.tip,.wy-alert.wy-alert-warning{background:#ffedcc}.rst-content .admonition-todo .admonition-title,.rst-content .admonition-todo .wy-alert-title,.rst-content .attention .admonition-title,.rst-content .attention .wy-alert-title,.rst-content .caution .admonition-title,.rst-content .caution .wy-alert-title,.rst-content .warning .admonition-title,.rst-content .warning .wy-alert-title,.rst-content .wy-alert-warning.admonition .admonition-title,.rst-content .wy-alert-warning.admonition .wy-alert-title,.rst-content .wy-alert-warning.danger .admonition-title,.rst-content .wy-alert-warning.danger .wy-alert-title,.rst-content .wy-alert-warning.error .admonition-title,.rst-content .wy-alert-warning.error .wy-alert-title,.rst-content .wy-alert-warning.hint .admonition-title,.rst-content .wy-alert-warning.hint .wy-alert-title,.rst-content .wy-alert-warning.important .admonition-title,.rst-content .wy-alert-warning.important .wy-alert-title,.rst-content .wy-alert-warning.note .admonition-title,.rst-content .wy-alert-warning.note .wy-alert-title,.rst-content .wy-alert-warning.seealso .admonition-title,.rst-content .wy-alert-warning.seealso .wy-alert-title,.rst-content .wy-alert-warning.tip .admonition-title,.rst-content .wy-alert-warning.tip .wy-alert-title,.rst-content .wy-alert.wy-alert-warning .admonition-title,.wy-alert.wy-alert-warning .rst-content .admonition-title,.wy-alert.wy-alert-warning .wy-alert-title{background:#f0b37e}.rst-content .note,.rst-content .seealso,.rst-content .wy-alert-info.admonition,.rst-content .wy-alert-info.admonition-todo,.rst-content .wy-alert-info.attention,.rst-content .wy-alert-info.caution,.rst-content .wy-alert-info.danger,.rst-content .wy-alert-info.error,.rst-content .wy-alert-info.hint,.rst-content .wy-alert-info.important,.rst-content .wy-alert-info.tip,.rst-content 
.wy-alert-info.warning,.wy-alert.wy-alert-info{background:#e7f2fa}.rst-content .note .admonition-title,.rst-content .note .wy-alert-title,.rst-content .seealso .admonition-title,.rst-content .seealso .wy-alert-title,.rst-content .wy-alert-info.admonition-todo .admonition-title,.rst-content .wy-alert-info.admonition-todo .wy-alert-title,.rst-content .wy-alert-info.admonition .admonition-title,.rst-content .wy-alert-info.admonition .wy-alert-title,.rst-content .wy-alert-info.attention .admonition-title,.rst-content .wy-alert-info.attention .wy-alert-title,.rst-content .wy-alert-info.caution .admonition-title,.rst-content .wy-alert-info.caution .wy-alert-title,.rst-content .wy-alert-info.danger .admonition-title,.rst-content .wy-alert-info.danger .wy-alert-title,.rst-content .wy-alert-info.error .admonition-title,.rst-content .wy-alert-info.error .wy-alert-title,.rst-content .wy-alert-info.hint .admonition-title,.rst-content .wy-alert-info.hint .wy-alert-title,.rst-content .wy-alert-info.important .admonition-title,.rst-content .wy-alert-info.important .wy-alert-title,.rst-content .wy-alert-info.tip .admonition-title,.rst-content .wy-alert-info.tip .wy-alert-title,.rst-content .wy-alert-info.warning .admonition-title,.rst-content .wy-alert-info.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-info .admonition-title,.wy-alert.wy-alert-info .rst-content .admonition-title,.wy-alert.wy-alert-info .wy-alert-title{background:#6ab0de}.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .wy-alert-success.admonition,.rst-content .wy-alert-success.admonition-todo,.rst-content .wy-alert-success.attention,.rst-content .wy-alert-success.caution,.rst-content .wy-alert-success.danger,.rst-content .wy-alert-success.error,.rst-content .wy-alert-success.note,.rst-content .wy-alert-success.seealso,.rst-content .wy-alert-success.warning,.wy-alert.wy-alert-success{background:#dbfaf4}.rst-content .hint .admonition-title,.rst-content .hint .wy-alert-title,.rst-content .important .admonition-title,.rst-content .important .wy-alert-title,.rst-content .tip .admonition-title,.rst-content .tip .wy-alert-title,.rst-content .wy-alert-success.admonition-todo .admonition-title,.rst-content .wy-alert-success.admonition-todo .wy-alert-title,.rst-content .wy-alert-success.admonition .admonition-title,.rst-content .wy-alert-success.admonition .wy-alert-title,.rst-content .wy-alert-success.attention .admonition-title,.rst-content .wy-alert-success.attention .wy-alert-title,.rst-content .wy-alert-success.caution .admonition-title,.rst-content .wy-alert-success.caution .wy-alert-title,.rst-content .wy-alert-success.danger .admonition-title,.rst-content .wy-alert-success.danger .wy-alert-title,.rst-content .wy-alert-success.error .admonition-title,.rst-content .wy-alert-success.error .wy-alert-title,.rst-content .wy-alert-success.note .admonition-title,.rst-content .wy-alert-success.note .wy-alert-title,.rst-content .wy-alert-success.seealso .admonition-title,.rst-content .wy-alert-success.seealso .wy-alert-title,.rst-content .wy-alert-success.warning .admonition-title,.rst-content .wy-alert-success.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-success .admonition-title,.wy-alert.wy-alert-success .rst-content .admonition-title,.wy-alert.wy-alert-success .wy-alert-title{background:#1abc9c}.rst-content .wy-alert-neutral.admonition,.rst-content .wy-alert-neutral.admonition-todo,.rst-content .wy-alert-neutral.attention,.rst-content .wy-alert-neutral.caution,.rst-content 
.wy-alert-neutral.danger,.rst-content .wy-alert-neutral.error,.rst-content .wy-alert-neutral.hint,.rst-content .wy-alert-neutral.important,.rst-content .wy-alert-neutral.note,.rst-content .wy-alert-neutral.seealso,.rst-content .wy-alert-neutral.tip,.rst-content .wy-alert-neutral.warning,.wy-alert.wy-alert-neutral{background:#f3f6f6}.rst-content .wy-alert-neutral.admonition-todo .admonition-title,.rst-content .wy-alert-neutral.admonition-todo .wy-alert-title,.rst-content .wy-alert-neutral.admonition .admonition-title,.rst-content .wy-alert-neutral.admonition .wy-alert-title,.rst-content .wy-alert-neutral.attention .admonition-title,.rst-content .wy-alert-neutral.attention .wy-alert-title,.rst-content .wy-alert-neutral.caution .admonition-title,.rst-content .wy-alert-neutral.caution .wy-alert-title,.rst-content .wy-alert-neutral.danger .admonition-title,.rst-content .wy-alert-neutral.danger .wy-alert-title,.rst-content .wy-alert-neutral.error .admonition-title,.rst-content .wy-alert-neutral.error .wy-alert-title,.rst-content .wy-alert-neutral.hint .admonition-title,.rst-content .wy-alert-neutral.hint .wy-alert-title,.rst-content .wy-alert-neutral.important .admonition-title,.rst-content .wy-alert-neutral.important .wy-alert-title,.rst-content .wy-alert-neutral.note .admonition-title,.rst-content .wy-alert-neutral.note .wy-alert-title,.rst-content .wy-alert-neutral.seealso .admonition-title,.rst-content .wy-alert-neutral.seealso .wy-alert-title,.rst-content .wy-alert-neutral.tip .admonition-title,.rst-content .wy-alert-neutral.tip .wy-alert-title,.rst-content .wy-alert-neutral.warning .admonition-title,.rst-content .wy-alert-neutral.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-neutral .admonition-title,.wy-alert.wy-alert-neutral .rst-content .admonition-title,.wy-alert.wy-alert-neutral .wy-alert-title{color:#404040;background:#e1e4e5}.rst-content .wy-alert-neutral.admonition-todo a,.rst-content .wy-alert-neutral.admonition a,.rst-content .wy-alert-neutral.attention a,.rst-content .wy-alert-neutral.caution a,.rst-content .wy-alert-neutral.danger a,.rst-content .wy-alert-neutral.error a,.rst-content .wy-alert-neutral.hint a,.rst-content .wy-alert-neutral.important a,.rst-content .wy-alert-neutral.note a,.rst-content .wy-alert-neutral.seealso a,.rst-content .wy-alert-neutral.tip a,.rst-content .wy-alert-neutral.warning a,.wy-alert.wy-alert-neutral a{color:#2980b9}.rst-content .admonition-todo p:last-child,.rst-content .admonition p:last-child,.rst-content .attention p:last-child,.rst-content .caution p:last-child,.rst-content .danger p:last-child,.rst-content .error p:last-child,.rst-content .hint p:last-child,.rst-content .important p:last-child,.rst-content .note p:last-child,.rst-content .seealso p:last-child,.rst-content .tip p:last-child,.rst-content .warning p:last-child,.wy-alert p:last-child{margin-bottom:0}.wy-tray-container{position:fixed;bottom:0;left:0;z-index:600}.wy-tray-container li{display:block;width:300px;background:transparent;color:#fff;text-align:center;box-shadow:0 5px 5px 0 rgba(0,0,0,.1);padding:0 24px;min-width:20%;opacity:0;height:0;line-height:56px;overflow:hidden;-webkit-transition:all .3s ease-in;-moz-transition:all .3s ease-in;transition:all .3s ease-in}.wy-tray-container li.wy-tray-item-success{background:#27ae60}.wy-tray-container li.wy-tray-item-info{background:#2980b9}.wy-tray-container li.wy-tray-item-warning{background:#e67e22}.wy-tray-container li.wy-tray-item-danger{background:#e74c3c}.wy-tray-container li.on{opacity:1;height:56px}@media screen 
and (max-width:768px){.wy-tray-container{bottom:auto;top:0;width:100%}.wy-tray-container li{width:100%}}button{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle;cursor:pointer;line-height:normal;-webkit-appearance:button;*overflow:visible}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}button[disabled]{cursor:default}.btn{display:inline-block;border-radius:2px;line-height:normal;white-space:nowrap;text-align:center;cursor:pointer;font-size:100%;padding:6px 12px 8px;color:#fff;border:1px solid rgba(0,0,0,.1);background-color:#27ae60;text-decoration:none;font-weight:400;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 2px -1px hsla(0,0%,100%,.5),inset 0 -2px 0 0 rgba(0,0,0,.1);outline-none:false;vertical-align:middle;*display:inline;zoom:1;-webkit-user-drag:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;-webkit-transition:all .1s linear;-moz-transition:all .1s linear;transition:all .1s linear}.btn-hover{background:#2e8ece;color:#fff}.btn:hover{background:#2cc36b;color:#fff}.btn:focus{background:#2cc36b;outline:0}.btn:active{box-shadow:inset 0 -1px 0 0 rgba(0,0,0,.05),inset 0 2px 0 0 rgba(0,0,0,.1);padding:8px 12px 6px}.btn:visited{color:#fff}.btn-disabled,.btn-disabled:active,.btn-disabled:focus,.btn-disabled:hover,.btn:disabled{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);filter:alpha(opacity=40);opacity:.4;cursor:not-allowed;box-shadow:none}.btn::-moz-focus-inner{padding:0;border:0}.btn-small{font-size:80%}.btn-info{background-color:#2980b9!important}.btn-info:hover{background-color:#2e8ece!important}.btn-neutral{background-color:#f3f6f6!important;color:#404040!important}.btn-neutral:hover{background-color:#e5ebeb!important;color:#404040}.btn-neutral:visited{color:#404040!important}.btn-success{background-color:#27ae60!important}.btn-success:hover{background-color:#295!important}.btn-danger{background-color:#e74c3c!important}.btn-danger:hover{background-color:#ea6153!important}.btn-warning{background-color:#e67e22!important}.btn-warning:hover{background-color:#e98b39!important}.btn-invert{background-color:#222}.btn-invert:hover{background-color:#2f2f2f!important}.btn-link{background-color:transparent!important;color:#2980b9;box-shadow:none;border-color:transparent!important}.btn-link:active,.btn-link:hover{background-color:transparent!important;color:#409ad5!important;box-shadow:none}.btn-link:visited{color:#9b59b6}.wy-btn-group .btn,.wy-control .btn{vertical-align:middle}.wy-btn-group{margin-bottom:24px;*zoom:1}.wy-btn-group:after,.wy-btn-group:before{display:table;content:""}.wy-btn-group:after{clear:both}.wy-dropdown{position:relative;display:inline-block}.wy-dropdown-active .wy-dropdown-menu{display:block}.wy-dropdown-menu{position:absolute;left:0;display:none;float:left;top:100%;min-width:100%;background:#fcfcfc;z-index:100;border:1px solid #cfd7dd;box-shadow:0 2px 2px 0 rgba(0,0,0,.1);padding:12px}.wy-dropdown-menu>dd>a{display:block;clear:both;color:#404040;white-space:nowrap;font-size:90%;padding:0 12px;cursor:pointer}.wy-dropdown-menu>dd>a:hover{background:#2980b9;color:#fff}.wy-dropdown-menu>dd.divider{border-top:1px solid #cfd7dd;margin:6px 0}.wy-dropdown-menu>dd.search{padding-bottom:12px}.wy-dropdown-menu>dd.search 
input[type=search]{width:100%}.wy-dropdown-menu>dd.call-to-action{background:#e3e3e3;text-transform:uppercase;font-weight:500;font-size:80%}.wy-dropdown-menu>dd.call-to-action:hover{background:#e3e3e3}.wy-dropdown-menu>dd.call-to-action .btn{color:#fff}.wy-dropdown.wy-dropdown-up .wy-dropdown-menu{bottom:100%;top:auto;left:auto;right:0}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu{background:#fcfcfc;margin-top:2px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a{padding:6px 12px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a:hover{background:#2980b9;color:#fff}.wy-dropdown.wy-dropdown-left .wy-dropdown-menu{right:0;left:auto;text-align:right}.wy-dropdown-arrow:before{content:" ";border-bottom:5px solid #f5f5f5;border-left:5px solid transparent;border-right:5px solid transparent;position:absolute;display:block;top:-4px;left:50%;margin-left:-3px}.wy-dropdown-arrow.wy-dropdown-arrow-left:before{left:11px}.wy-form-stacked select{display:block}.wy-form-aligned .wy-help-inline,.wy-form-aligned input,.wy-form-aligned label,.wy-form-aligned select,.wy-form-aligned textarea{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-form-aligned .wy-control-group>label{display:inline-block;vertical-align:middle;width:10em;margin:6px 12px 0 0;float:left}.wy-form-aligned .wy-control{float:left}.wy-form-aligned .wy-control label{display:block}.wy-form-aligned .wy-control select{margin-top:6px}fieldset{margin:0}fieldset,legend{border:0;padding:0}legend{width:100%;white-space:normal;margin-bottom:24px;font-size:150%;*margin-left:-7px}label,legend{display:block}label{margin:0 0 .3125em;color:#333;font-size:90%}input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}.wy-control-group{margin-bottom:24px;max-width:1200px;margin-left:auto;margin-right:auto;*zoom:1}.wy-control-group:after,.wy-control-group:before{display:table;content:""}.wy-control-group:after{clear:both}.wy-control-group.wy-control-group-required>label:after{content:" *";color:#e74c3c}.wy-control-group .wy-form-full,.wy-control-group .wy-form-halves,.wy-control-group .wy-form-thirds{padding-bottom:12px}.wy-control-group .wy-form-full input[type=color],.wy-control-group .wy-form-full input[type=date],.wy-control-group .wy-form-full input[type=datetime-local],.wy-control-group .wy-form-full input[type=datetime],.wy-control-group .wy-form-full input[type=email],.wy-control-group .wy-form-full input[type=month],.wy-control-group .wy-form-full input[type=number],.wy-control-group .wy-form-full input[type=password],.wy-control-group .wy-form-full input[type=search],.wy-control-group .wy-form-full input[type=tel],.wy-control-group .wy-form-full input[type=text],.wy-control-group .wy-form-full input[type=time],.wy-control-group .wy-form-full input[type=url],.wy-control-group .wy-form-full input[type=week],.wy-control-group .wy-form-full select,.wy-control-group .wy-form-halves input[type=color],.wy-control-group .wy-form-halves input[type=date],.wy-control-group .wy-form-halves input[type=datetime-local],.wy-control-group .wy-form-halves input[type=datetime],.wy-control-group .wy-form-halves input[type=email],.wy-control-group .wy-form-halves input[type=month],.wy-control-group .wy-form-halves input[type=number],.wy-control-group .wy-form-halves input[type=password],.wy-control-group .wy-form-halves input[type=search],.wy-control-group .wy-form-halves input[type=tel],.wy-control-group .wy-form-halves input[type=text],.wy-control-group .wy-form-halves input[type=time],.wy-control-group 
.wy-form-halves input[type=url],.wy-control-group .wy-form-halves input[type=week],.wy-control-group .wy-form-halves select,.wy-control-group .wy-form-thirds input[type=color],.wy-control-group .wy-form-thirds input[type=date],.wy-control-group .wy-form-thirds input[type=datetime-local],.wy-control-group .wy-form-thirds input[type=datetime],.wy-control-group .wy-form-thirds input[type=email],.wy-control-group .wy-form-thirds input[type=month],.wy-control-group .wy-form-thirds input[type=number],.wy-control-group .wy-form-thirds input[type=password],.wy-control-group .wy-form-thirds input[type=search],.wy-control-group .wy-form-thirds input[type=tel],.wy-control-group .wy-form-thirds input[type=text],.wy-control-group .wy-form-thirds input[type=time],.wy-control-group .wy-form-thirds input[type=url],.wy-control-group .wy-form-thirds input[type=week],.wy-control-group .wy-form-thirds select{width:100%}.wy-control-group .wy-form-full{float:left;display:block;width:100%;margin-right:0}.wy-control-group .wy-form-full:last-child{margin-right:0}.wy-control-group .wy-form-halves{float:left;display:block;margin-right:2.35765%;width:48.82117%}.wy-control-group .wy-form-halves:last-child,.wy-control-group .wy-form-halves:nth-of-type(2n){margin-right:0}.wy-control-group .wy-form-halves:nth-of-type(odd){clear:left}.wy-control-group .wy-form-thirds{float:left;display:block;margin-right:2.35765%;width:31.76157%}.wy-control-group .wy-form-thirds:last-child,.wy-control-group .wy-form-thirds:nth-of-type(3n){margin-right:0}.wy-control-group .wy-form-thirds:nth-of-type(3n+1){clear:left}.wy-control-group.wy-control-group-no-input .wy-control,.wy-control-no-input{margin:6px 0 0;font-size:90%}.wy-control-no-input{display:inline-block}.wy-control-group.fluid-input input[type=color],.wy-control-group.fluid-input input[type=date],.wy-control-group.fluid-input input[type=datetime-local],.wy-control-group.fluid-input input[type=datetime],.wy-control-group.fluid-input input[type=email],.wy-control-group.fluid-input input[type=month],.wy-control-group.fluid-input input[type=number],.wy-control-group.fluid-input input[type=password],.wy-control-group.fluid-input input[type=search],.wy-control-group.fluid-input input[type=tel],.wy-control-group.fluid-input input[type=text],.wy-control-group.fluid-input input[type=time],.wy-control-group.fluid-input input[type=url],.wy-control-group.fluid-input input[type=week]{width:100%}.wy-form-message-inline{padding-left:.3em;color:#666;font-size:90%}.wy-form-message{display:block;color:#999;font-size:70%;margin-top:.3125em;font-style:italic}.wy-form-message p{font-size:inherit;font-style:italic;margin-bottom:6px}.wy-form-message p:last-child{margin-bottom:0}input{line-height:normal}input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;*overflow:visible}input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week]{-webkit-appearance:none;padding:6px;display:inline-block;border:1px solid #ccc;font-size:80%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 3px #ddd;border-radius:0;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}input[type=datetime-local]{padding:.34375em 
.625em}input[disabled]{cursor:default}input[type=checkbox],input[type=radio]{padding:0;margin-right:.3125em;*height:13px;*width:13px}input[type=checkbox],input[type=radio],input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}input[type=color]:focus,input[type=date]:focus,input[type=datetime-local]:focus,input[type=datetime]:focus,input[type=email]:focus,input[type=month]:focus,input[type=number]:focus,input[type=password]:focus,input[type=search]:focus,input[type=tel]:focus,input[type=text]:focus,input[type=time]:focus,input[type=url]:focus,input[type=week]:focus{outline:0;outline:thin dotted\9;border-color:#333}input.no-focus:focus{border-color:#ccc!important}input[type=checkbox]:focus,input[type=file]:focus,input[type=radio]:focus{outline:thin dotted #333;outline:1px auto #129fea}input[type=color][disabled],input[type=date][disabled],input[type=datetime-local][disabled],input[type=datetime][disabled],input[type=email][disabled],input[type=month][disabled],input[type=number][disabled],input[type=password][disabled],input[type=search][disabled],input[type=tel][disabled],input[type=text][disabled],input[type=time][disabled],input[type=url][disabled],input[type=week][disabled]{cursor:not-allowed;background-color:#fafafa}input:focus:invalid,select:focus:invalid,textarea:focus:invalid{color:#e74c3c;border:1px solid #e74c3c}input:focus:invalid:focus,select:focus:invalid:focus,textarea:focus:invalid:focus{border-color:#e74c3c}input[type=checkbox]:focus:invalid:focus,input[type=file]:focus:invalid:focus,input[type=radio]:focus:invalid:focus{outline-color:#e74c3c}input.wy-input-large{padding:12px;font-size:100%}textarea{overflow:auto;vertical-align:top;width:100%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif}select,textarea{padding:.5em .625em;display:inline-block;border:1px solid #ccc;font-size:80%;box-shadow:inset 0 1px 3px #ddd;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}select{border:1px solid #ccc;background-color:#fff}select[multiple]{height:auto}select:focus,textarea:focus{outline:0}input[readonly],select[disabled],select[readonly],textarea[disabled],textarea[readonly]{cursor:not-allowed;background-color:#fafafa}input[type=checkbox][disabled],input[type=radio][disabled]{cursor:not-allowed}.wy-checkbox,.wy-radio{margin:6px 0;color:#404040;display:block}.wy-checkbox input,.wy-radio input{vertical-align:baseline}.wy-form-message-inline{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-input-prefix,.wy-input-suffix{white-space:nowrap;padding:6px}.wy-input-prefix .wy-input-context,.wy-input-suffix .wy-input-context{line-height:27px;padding:0 8px;display:inline-block;font-size:80%;background-color:#f3f6f6;border:1px solid #ccc;color:#999}.wy-input-suffix .wy-input-context{border-left:0}.wy-input-prefix .wy-input-context{border-right:0}.wy-switch{position:relative;display:block;height:24px;margin-top:12px;cursor:pointer}.wy-switch:before{left:0;top:0;width:36px;height:12px;background:#ccc}.wy-switch:after,.wy-switch:before{position:absolute;content:"";display:block;border-radius:4px;-webkit-transition:all .2s ease-in-out;-moz-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.wy-switch:after{width:18px;height:18px;background:#999;left:-3px;top:-3px}.wy-switch 
span{position:absolute;left:48px;display:block;font-size:12px;color:#ccc;line-height:1}.wy-switch.active:before{background:#1e8449}.wy-switch.active:after{left:24px;background:#27ae60}.wy-switch.disabled{cursor:not-allowed;opacity:.8}.wy-control-group.wy-control-group-error .wy-form-message,.wy-control-group.wy-control-group-error>label{color:#e74c3c}.wy-control-group.wy-control-group-error input[type=color],.wy-control-group.wy-control-group-error input[type=date],.wy-control-group.wy-control-group-error input[type=datetime-local],.wy-control-group.wy-control-group-error input[type=datetime],.wy-control-group.wy-control-group-error input[type=email],.wy-control-group.wy-control-group-error input[type=month],.wy-control-group.wy-control-group-error input[type=number],.wy-control-group.wy-control-group-error input[type=password],.wy-control-group.wy-control-group-error input[type=search],.wy-control-group.wy-control-group-error input[type=tel],.wy-control-group.wy-control-group-error input[type=text],.wy-control-group.wy-control-group-error input[type=time],.wy-control-group.wy-control-group-error input[type=url],.wy-control-group.wy-control-group-error input[type=week],.wy-control-group.wy-control-group-error textarea{border:1px solid #e74c3c}.wy-inline-validate{white-space:nowrap}.wy-inline-validate .wy-input-context{padding:.5em .625em;display:inline-block;font-size:80%}.wy-inline-validate.wy-inline-validate-success .wy-input-context{color:#27ae60}.wy-inline-validate.wy-inline-validate-danger .wy-input-context{color:#e74c3c}.wy-inline-validate.wy-inline-validate-warning .wy-input-context{color:#e67e22}.wy-inline-validate.wy-inline-validate-info .wy-input-context{color:#2980b9}.rotate-90{-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.rotate-180{-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.rotate-270{-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.mirror{-webkit-transform:scaleX(-1);-moz-transform:scaleX(-1);-ms-transform:scaleX(-1);-o-transform:scaleX(-1);transform:scaleX(-1)}.mirror.rotate-90{-webkit-transform:scaleX(-1) rotate(90deg);-moz-transform:scaleX(-1) rotate(90deg);-ms-transform:scaleX(-1) rotate(90deg);-o-transform:scaleX(-1) rotate(90deg);transform:scaleX(-1) rotate(90deg)}.mirror.rotate-180{-webkit-transform:scaleX(-1) rotate(180deg);-moz-transform:scaleX(-1) rotate(180deg);-ms-transform:scaleX(-1) rotate(180deg);-o-transform:scaleX(-1) rotate(180deg);transform:scaleX(-1) rotate(180deg)}.mirror.rotate-270{-webkit-transform:scaleX(-1) rotate(270deg);-moz-transform:scaleX(-1) rotate(270deg);-ms-transform:scaleX(-1) rotate(270deg);-o-transform:scaleX(-1) rotate(270deg);transform:scaleX(-1) rotate(270deg)}@media only screen and (max-width:480px){.wy-form button[type=submit]{margin:.7em 0 0}.wy-form input[type=color],.wy-form input[type=date],.wy-form input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=text],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week],.wy-form label{margin-bottom:.3em;display:block}.wy-form input[type=color],.wy-form input[type=date],.wy-form 
input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week]{margin-bottom:0}.wy-form-aligned .wy-control-group label{margin-bottom:.3em;text-align:left;display:block;width:100%}.wy-form-aligned .wy-control{margin:1.5em 0 0}.wy-form-message,.wy-form-message-inline,.wy-form .wy-help-inline{display:block;font-size:80%;padding:6px 0}}@media screen and (max-width:768px){.tablet-hide{display:none}}@media screen and (max-width:480px){.mobile-hide{display:none}}.float-left{float:left}.float-right{float:right}.full-width{width:100%}.rst-content table.docutils,.rst-content table.field-list,.wy-table{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.rst-content table.docutils caption,.rst-content table.field-list caption,.wy-table caption{color:#000;font:italic 85%/1 arial,sans-serif;padding:1em 0;text-align:center}.rst-content table.docutils td,.rst-content table.docutils th,.rst-content table.field-list td,.rst-content table.field-list th,.wy-table td,.wy-table th{font-size:90%;margin:0;overflow:visible;padding:8px 16px}.rst-content table.docutils td:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list td:first-child,.rst-content table.field-list th:first-child,.wy-table td:first-child,.wy-table th:first-child{border-left-width:0}.rst-content table.docutils thead,.rst-content table.field-list thead,.wy-table thead{color:#000;text-align:left;vertical-align:bottom;white-space:nowrap}.rst-content table.docutils thead th,.rst-content table.field-list thead th,.wy-table thead th{font-weight:700;border-bottom:2px solid #e1e4e5}.rst-content table.docutils td,.rst-content table.field-list td,.wy-table td{background-color:transparent;vertical-align:middle}.rst-content table.docutils td p,.rst-content table.field-list td p,.wy-table td p{line-height:18px}.rst-content table.docutils td p:last-child,.rst-content table.field-list td p:last-child,.wy-table td p:last-child{margin-bottom:0}.rst-content table.docutils .wy-table-cell-min,.rst-content table.field-list .wy-table-cell-min,.wy-table .wy-table-cell-min{width:1%;padding-right:0}.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min input[type=checkbox],.wy-table .wy-table-cell-min input[type=checkbox]{margin:0}.wy-table-secondary{color:grey;font-size:90%}.wy-table-tertiary{color:grey;font-size:80%}.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td,.wy-table-backed,.wy-table-odd td,.wy-table-striped tr:nth-child(2n-1) td{background-color:#f3f6f6}.rst-content table.docutils,.wy-table-bordered-all{border:1px solid #e1e4e5}.rst-content table.docutils td,.wy-table-bordered-all td{border-bottom:1px solid #e1e4e5;border-left:1px solid #e1e4e5}.rst-content table.docutils tbody>tr:last-child td,.wy-table-bordered-all tbody>tr:last-child td{border-bottom-width:0}.wy-table-bordered{border:1px solid #e1e4e5}.wy-table-bordered-rows td{border-bottom:1px solid #e1e4e5}.wy-table-bordered-rows tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal td,.wy-table-horizontal th{border-width:0 0 1px;border-bottom:1px solid #e1e4e5}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.wy-table-responsive 
table{margin-bottom:0!important}.wy-table-responsive table td,.wy-table-responsive table th{white-space:nowrap}a{color:#2980b9;text-decoration:none;cursor:pointer}a:hover{color:#3091d1}a:visited{color:#9b59b6}html{height:100%}body,html{overflow-x:hidden}body{font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;font-weight:400;color:#404040;min-height:100%;background:#edf0f2}.wy-text-left{text-align:left}.wy-text-center{text-align:center}.wy-text-right{text-align:right}.wy-text-large{font-size:120%}.wy-text-normal{font-size:100%}.wy-text-small,small{font-size:80%}.wy-text-strike{text-decoration:line-through}.wy-text-warning{color:#e67e22!important}a.wy-text-warning:hover{color:#eb9950!important}.wy-text-info{color:#2980b9!important}a.wy-text-info:hover{color:#409ad5!important}.wy-text-success{color:#27ae60!important}a.wy-text-success:hover{color:#36d278!important}.wy-text-danger{color:#e74c3c!important}a.wy-text-danger:hover{color:#ed7669!important}.wy-text-neutral{color:#404040!important}a.wy-text-neutral:hover{color:#595959!important}.rst-content .toctree-wrapper>p.caption,h1,h2,h3,h4,h5,h6,legend{margin-top:0;font-weight:700;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif}p{line-height:24px;font-size:16px;margin:0 0 24px}h1{font-size:175%}.rst-content .toctree-wrapper>p.caption,h2{font-size:150%}h3{font-size:125%}h4{font-size:115%}h5{font-size:110%}h6{font-size:100%}hr{display:block;height:1px;border:0;border-top:1px solid #e1e4e5;margin:24px 0;padding:0}.rst-content code,.rst-content tt,code{white-space:nowrap;max-width:100%;background:#fff;border:1px solid #e1e4e5;font-size:75%;padding:0 5px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#e74c3c;overflow-x:auto}.rst-content tt.code-large,code.code-large{font-size:90%}.rst-content .section ul,.rst-content .toctree-wrapper ul,.rst-content section ul,.wy-plain-list-disc,article ul{list-style:disc;line-height:24px;margin-bottom:24px}.rst-content .section ul li,.rst-content .toctree-wrapper ul li,.rst-content section ul li,.wy-plain-list-disc li,article ul li{list-style:disc;margin-left:24px}.rst-content .section ul li p:last-child,.rst-content .section ul li ul,.rst-content .toctree-wrapper ul li p:last-child,.rst-content .toctree-wrapper ul li ul,.rst-content section ul li p:last-child,.rst-content section ul li ul,.wy-plain-list-disc li p:last-child,.wy-plain-list-disc li ul,article ul li p:last-child,article ul li ul{margin-bottom:0}.rst-content .section ul li li,.rst-content .toctree-wrapper ul li li,.rst-content section ul li li,.wy-plain-list-disc li li,article ul li li{list-style:circle}.rst-content .section ul li li li,.rst-content .toctree-wrapper ul li li li,.rst-content section ul li li li,.wy-plain-list-disc li li li,article ul li li li{list-style:square}.rst-content .section ul li ol li,.rst-content .toctree-wrapper ul li ol li,.rst-content section ul li ol li,.wy-plain-list-disc li ol li,article ul li ol li{list-style:decimal}.rst-content .section ol,.rst-content .section ol.arabic,.rst-content .toctree-wrapper ol,.rst-content .toctree-wrapper ol.arabic,.rst-content section ol,.rst-content section ol.arabic,.wy-plain-list-decimal,article ol{list-style:decimal;line-height:24px;margin-bottom:24px}.rst-content .section ol.arabic li,.rst-content .section ol li,.rst-content .toctree-wrapper ol.arabic li,.rst-content .toctree-wrapper ol li,.rst-content section ol.arabic li,.rst-content section ol li,.wy-plain-list-decimal li,article ol 
li{list-style:decimal;margin-left:24px}.rst-content .section ol.arabic li ul,.rst-content .section ol li p:last-child,.rst-content .section ol li ul,.rst-content .toctree-wrapper ol.arabic li ul,.rst-content .toctree-wrapper ol li p:last-child,.rst-content .toctree-wrapper ol li ul,.rst-content section ol.arabic li ul,.rst-content section ol li p:last-child,.rst-content section ol li ul,.wy-plain-list-decimal li p:last-child,.wy-plain-list-decimal li ul,article ol li p:last-child,article ol li ul{margin-bottom:0}.rst-content .section ol.arabic li ul li,.rst-content .section ol li ul li,.rst-content .toctree-wrapper ol.arabic li ul li,.rst-content .toctree-wrapper ol li ul li,.rst-content section ol.arabic li ul li,.rst-content section ol li ul li,.wy-plain-list-decimal li ul li,article ol li ul li{list-style:disc}.wy-breadcrumbs{*zoom:1}.wy-breadcrumbs:after,.wy-breadcrumbs:before{display:table;content:""}.wy-breadcrumbs:after{clear:both}.wy-breadcrumbs>li{display:inline-block;padding-top:5px}.wy-breadcrumbs>li.wy-breadcrumbs-aside{float:right}.rst-content .wy-breadcrumbs>li code,.rst-content .wy-breadcrumbs>li tt,.wy-breadcrumbs>li .rst-content tt,.wy-breadcrumbs>li code{all:inherit;color:inherit}.breadcrumb-item:before{content:"/";color:#bbb;font-size:13px;padding:0 6px 0 3px}.wy-breadcrumbs-extra{margin-bottom:0;color:#b3b3b3;font-size:80%;display:inline-block}@media screen and (max-width:480px){.wy-breadcrumbs-extra,.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}@media print{.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}html{font-size:16px}.wy-affix{position:fixed;top:1.618em}.wy-menu a:hover{text-decoration:none}.wy-menu-horiz{*zoom:1}.wy-menu-horiz:after,.wy-menu-horiz:before{display:table;content:""}.wy-menu-horiz:after{clear:both}.wy-menu-horiz li,.wy-menu-horiz ul{display:inline-block}.wy-menu-horiz li:hover{background:hsla(0,0%,100%,.1)}.wy-menu-horiz li.divide-left{border-left:1px solid #404040}.wy-menu-horiz li.divide-right{border-right:1px solid #404040}.wy-menu-horiz a{height:32px;display:inline-block;line-height:32px;padding:0 16px}.wy-menu-vertical{width:300px}.wy-menu-vertical header,.wy-menu-vertical p.caption{color:#55a5d9;height:32px;line-height:32px;padding:0 1.618em;margin:12px 0 0;display:block;font-weight:700;text-transform:uppercase;font-size:85%;white-space:nowrap}.wy-menu-vertical ul{margin-bottom:0}.wy-menu-vertical li.divide-top{border-top:1px solid #404040}.wy-menu-vertical li.divide-bottom{border-bottom:1px solid #404040}.wy-menu-vertical li.current{background:#e3e3e3}.wy-menu-vertical li.current a{color:grey;border-right:1px solid #c9c9c9;padding:.4045em 2.427em}.wy-menu-vertical li.current a:hover{background:#d6d6d6}.rst-content .wy-menu-vertical li tt,.wy-menu-vertical li .rst-content tt,.wy-menu-vertical li code{border:none;background:inherit;color:inherit;padding-left:0;padding-right:0}.wy-menu-vertical li button.toctree-expand{display:block;float:left;margin-left:-1.2em;line-height:18px;color:#4d4d4d;border:none;background:none;padding:0}.wy-menu-vertical li.current>a,.wy-menu-vertical li.on a{color:#404040;font-weight:700;position:relative;background:#fcfcfc;border:none;padding:.4045em 1.618em}.wy-menu-vertical li.current>a:hover,.wy-menu-vertical li.on a:hover{background:#fcfcfc}.wy-menu-vertical li.current>a:hover button.toctree-expand,.wy-menu-vertical li.on a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a 
button.toctree-expand{display:block;line-height:18px;color:#333}.wy-menu-vertical li.toctree-l1.current>a{border-bottom:1px solid #c9c9c9;border-top:1px solid #c9c9c9}.wy-menu-vertical .toctree-l1.current .toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .toctree-l11>ul{display:none}.wy-menu-vertical .toctree-l1.current .current.toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .current.toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .current.toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .current.toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .current.toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .current.toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .current.toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .current.toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .current.toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .current.toctree-l11>ul{display:block}.wy-menu-vertical li.toctree-l3,.wy-menu-vertical li.toctree-l4{font-size:.9em}.wy-menu-vertical li.toctree-l2 a,.wy-menu-vertical li.toctree-l3 a,.wy-menu-vertical li.toctree-l4 a,.wy-menu-vertical li.toctree-l5 a,.wy-menu-vertical li.toctree-l6 a,.wy-menu-vertical li.toctree-l7 a,.wy-menu-vertical li.toctree-l8 a,.wy-menu-vertical li.toctree-l9 a,.wy-menu-vertical li.toctree-l10 a{color:#404040}.wy-menu-vertical li.toctree-l2 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l3 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l4 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l5 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l6 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l7 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l8 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l9 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l10 a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a,.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a,.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a,.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a,.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a,.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a,.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a,.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{display:block}.wy-menu-vertical li.toctree-l2.current>a{padding:.4045em 2.427em}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{padding:.4045em 1.618em .4045em 4.045em}.wy-menu-vertical li.toctree-l3.current>a{padding:.4045em 4.045em}.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{padding:.4045em 1.618em .4045em 5.663em}.wy-menu-vertical li.toctree-l4.current>a{padding:.4045em 5.663em}.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a{padding:.4045em 1.618em .4045em 7.281em}.wy-menu-vertical li.toctree-l5.current>a{padding:.4045em 7.281em}.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a{padding:.4045em 1.618em .4045em 8.899em}.wy-menu-vertical li.toctree-l6.current>a{padding:.4045em 
8.899em}.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a{padding:.4045em 1.618em .4045em 10.517em}.wy-menu-vertical li.toctree-l7.current>a{padding:.4045em 10.517em}.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a{padding:.4045em 1.618em .4045em 12.135em}.wy-menu-vertical li.toctree-l8.current>a{padding:.4045em 12.135em}.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a{padding:.4045em 1.618em .4045em 13.753em}.wy-menu-vertical li.toctree-l9.current>a{padding:.4045em 13.753em}.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a{padding:.4045em 1.618em .4045em 15.371em}.wy-menu-vertical li.toctree-l10.current>a{padding:.4045em 15.371em}.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{padding:.4045em 1.618em .4045em 16.989em}.wy-menu-vertical li.toctree-l2.current>a,.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{background:#c9c9c9}.wy-menu-vertical li.toctree-l2 button.toctree-expand{color:#a3a3a3}.wy-menu-vertical li.toctree-l3.current>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{background:#bdbdbd}.wy-menu-vertical li.toctree-l3 button.toctree-expand{color:#969696}.wy-menu-vertical li.current ul{display:block}.wy-menu-vertical li ul{margin-bottom:0;display:none}.wy-menu-vertical li ul li a{margin-bottom:0;color:#d9d9d9;font-weight:400}.wy-menu-vertical a{line-height:18px;padding:.4045em 1.618em;display:block;position:relative;font-size:90%;color:#d9d9d9}.wy-menu-vertical a:hover{background-color:#4e4a4a;cursor:pointer}.wy-menu-vertical a:hover button.toctree-expand{color:#d9d9d9}.wy-menu-vertical a:active{background-color:#2980b9;cursor:pointer;color:#fff}.wy-menu-vertical a:active button.toctree-expand{color:#fff}.wy-side-nav-search{display:block;width:300px;padding:.809em;margin-bottom:.809em;z-index:200;background-color:#2980b9;text-align:center;color:#fcfcfc}.wy-side-nav-search input[type=text]{width:100%;border-radius:50px;padding:6px 12px;border-color:#2472a4}.wy-side-nav-search img{display:block;margin:auto auto .809em;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-side-nav-search .wy-dropdown>a,.wy-side-nav-search>a{color:#fcfcfc;font-size:100%;font-weight:700;display:inline-block;padding:4px 6px;margin-bottom:.809em;max-width:100%}.wy-side-nav-search .wy-dropdown>a:hover,.wy-side-nav-search>a:hover{background:hsla(0,0%,100%,.1)}.wy-side-nav-search .wy-dropdown>a img.logo,.wy-side-nav-search>a img.logo{display:block;margin:0 auto;height:auto;width:auto;border-radius:0;max-width:100%;background:transparent}.wy-side-nav-search .wy-dropdown>a.icon img.logo,.wy-side-nav-search>a.icon img.logo{margin-top:.85em}.wy-side-nav-search>div.version{margin-top:-.4045em;margin-bottom:.809em;font-weight:400;color:hsla(0,0%,100%,.3)}.wy-nav .wy-menu-vertical header{color:#2980b9}.wy-nav .wy-menu-vertical a{color:#b3b3b3}.wy-nav .wy-menu-vertical a:hover{background-color:#2980b9;color:#fff}[data-menu-wrap]{-webkit-transition:all .2s ease-in;-moz-transition:all .2s ease-in;transition:all .2s 
ease-in;position:absolute;opacity:1;width:100%;opacity:0}[data-menu-wrap].move-center{left:0;right:auto;opacity:1}[data-menu-wrap].move-left{right:auto;left:-100%;opacity:0}[data-menu-wrap].move-right{right:-100%;left:auto;opacity:0}.wy-body-for-nav{background:#fcfcfc}.wy-grid-for-nav{position:absolute;width:100%;height:100%}.wy-nav-side{position:fixed;top:0;bottom:0;left:0;padding-bottom:2em;width:300px;overflow-x:hidden;overflow-y:hidden;min-height:100%;color:#9b9b9b;background:#343131;z-index:200}.wy-side-scroll{width:320px;position:relative;overflow-x:hidden;overflow-y:scroll;height:100%}.wy-nav-top{display:none;background:#2980b9;color:#fff;padding:.4045em .809em;position:relative;line-height:50px;text-align:center;font-size:100%;*zoom:1}.wy-nav-top:after,.wy-nav-top:before{display:table;content:""}.wy-nav-top:after{clear:both}.wy-nav-top a{color:#fff;font-weight:700}.wy-nav-top img{margin-right:12px;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-nav-top i{font-size:30px;float:left;cursor:pointer;padding-top:inherit}.wy-nav-content-wrap{margin-left:300px;background:#fcfcfc;min-height:100%}.wy-nav-content{padding:1.618em 3.236em;height:100%;max-width:800px;margin:auto}.wy-body-mask{position:fixed;width:100%;height:100%;background:rgba(0,0,0,.2);display:none;z-index:499}.wy-body-mask.on{display:block}footer{color:grey}footer p{margin-bottom:12px}.rst-content footer span.commit tt,footer span.commit .rst-content tt,footer span.commit code{padding:0;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:1em;background:none;border:none;color:grey}.rst-footer-buttons{*zoom:1}.rst-footer-buttons:after,.rst-footer-buttons:before{width:100%;display:table;content:""}.rst-footer-buttons:after{clear:both}.rst-breadcrumbs-buttons{margin-top:12px;*zoom:1}.rst-breadcrumbs-buttons:after,.rst-breadcrumbs-buttons:before{display:table;content:""}.rst-breadcrumbs-buttons:after{clear:both}#search-results .search li{margin-bottom:24px;border-bottom:1px solid #e1e4e5;padding-bottom:24px}#search-results .search li:first-child{border-top:1px solid #e1e4e5;padding-top:24px}#search-results .search li a{font-size:120%;margin-bottom:12px;display:inline-block}#search-results .context{color:grey;font-size:90%}.genindextable li>ul{margin-left:24px}@media screen and (max-width:768px){.wy-body-for-nav{background:#fcfcfc}.wy-nav-top{display:block}.wy-nav-side{left:-300px}.wy-nav-side.shift{width:85%;left:0}.wy-menu.wy-menu-vertical,.wy-side-nav-search,.wy-side-scroll{width:auto}.wy-nav-content-wrap{margin-left:0}.wy-nav-content-wrap .wy-nav-content{padding:1.618em}.wy-nav-content-wrap.shift{position:fixed;min-width:100%;left:85%;top:0;height:100%;overflow:hidden}}@media screen and (min-width:1100px){.wy-nav-content-wrap{background:rgba(0,0,0,.05)}.wy-nav-content{margin:0;background:#fcfcfc}}@media print{.rst-versions,.wy-nav-side,footer{display:none}.wy-nav-content-wrap{margin-left:0}}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:after,.rst-versions .rst-current-version:before{display:table;content:""}.rst-versions 
.rst-current-version:after{clear:both}.rst-content .code-block-caption .rst-versions .rst-current-version .headerlink,.rst-content .eqno .rst-versions .rst-current-version .headerlink,.rst-content .rst-versions .rst-current-version .admonition-title,.rst-content code.download .rst-versions .rst-current-version span:first-child,.rst-content dl dt .rst-versions .rst-current-version .headerlink,.rst-content h1 .rst-versions .rst-current-version .headerlink,.rst-content h2 .rst-versions .rst-current-version .headerlink,.rst-content h3 .rst-versions .rst-current-version .headerlink,.rst-content h4 .rst-versions .rst-current-version .headerlink,.rst-content h5 .rst-versions .rst-current-version .headerlink,.rst-content h6 .rst-versions .rst-current-version .headerlink,.rst-content p .rst-versions .rst-current-version .headerlink,.rst-content table>caption .rst-versions .rst-current-version .headerlink,.rst-content tt.download .rst-versions .rst-current-version span:first-child,.rst-versions .rst-current-version .fa,.rst-versions .rst-current-version .icon,.rst-versions .rst-current-version .rst-content .admonition-title,.rst-versions .rst-current-version .rst-content .code-block-caption .headerlink,.rst-versions .rst-current-version .rst-content .eqno .headerlink,.rst-versions .rst-current-version .rst-content code.download span:first-child,.rst-versions .rst-current-version .rst-content dl dt .headerlink,.rst-versions .rst-current-version .rst-content h1 .headerlink,.rst-versions .rst-current-version .rst-content h2 .headerlink,.rst-versions .rst-current-version .rst-content h3 .headerlink,.rst-versions .rst-current-version .rst-content h4 .headerlink,.rst-versions .rst-current-version .rst-content h5 .headerlink,.rst-versions .rst-current-version .rst-content h6 .headerlink,.rst-versions .rst-current-version .rst-content p .headerlink,.rst-versions .rst-current-version .rst-content table>caption .headerlink,.rst-versions .rst-current-version .rst-content tt.download span:first-child,.rst-versions .rst-current-version .wy-menu-vertical li button.toctree-expand,.wy-menu-vertical li .rst-versions .rst-current-version button.toctree-expand{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and 
(max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}.rst-content .toctree-wrapper>p.caption,.rst-content h1,.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6{margin-bottom:24px}.rst-content img{max-width:100%;height:auto}.rst-content div.figure,.rst-content figure{margin-bottom:24px}.rst-content div.figure .caption-text,.rst-content figure .caption-text{font-style:italic}.rst-content div.figure p:last-child.caption,.rst-content figure p:last-child.caption{margin-bottom:0}.rst-content div.figure.align-center,.rst-content figure.align-center{text-align:center}.rst-content .section>a>img,.rst-content .section>img,.rst-content section>a>img,.rst-content section>img{margin-bottom:24px}.rst-content abbr[title]{text-decoration:none}.rst-content.style-external-links a.reference.external:after{font-family:FontAwesome;content:"\f08e";color:#b3b3b3;vertical-align:super;font-size:60%;margin:0 .2em}.rst-content blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content pre.literal-block{white-space:pre;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;display:block;overflow:auto}.rst-content div[class^=highlight],.rst-content pre.literal-block{border:1px solid #e1e4e5;overflow-x:auto;margin:1px 0 24px}.rst-content div[class^=highlight] div[class^=highlight],.rst-content pre.literal-block div[class^=highlight]{padding:0;border:none;margin:0}.rst-content div[class^=highlight] td.code{width:100%}.rst-content .linenodiv pre{border-right:1px solid #e6e9ea;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;user-select:none;pointer-events:none}.rst-content div[class^=highlight] pre{white-space:pre;margin:0;padding:12px;display:block;overflow:auto}.rst-content div[class^=highlight] pre .hll{display:block;margin:0 -12px;padding:0 12px}.rst-content .linenodiv pre,.rst-content div[class^=highlight] pre,.rst-content pre.literal-block{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:12px;line-height:1.4}.rst-content div.highlight .gp,.rst-content div.highlight span.linenos{user-select:none;pointer-events:none}.rst-content div.highlight span.linenos{display:inline-block;padding-left:0;padding-right:12px;margin-right:12px;border-right:1px solid #e6e9ea}.rst-content .code-block-caption{font-style:italic;font-size:85%;line-height:1;padding:1em 0;text-align:center}@media print{.rst-content .codeblock,.rst-content div[class^=highlight],.rst-content div[class^=highlight] pre{white-space:pre-wrap}}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning{clear:both}.rst-content .admonition-todo .last,.rst-content .admonition-todo>:last-child,.rst-content .admonition .last,.rst-content .admonition>:last-child,.rst-content .attention .last,.rst-content .attention>:last-child,.rst-content .caution .last,.rst-content .caution>:last-child,.rst-content .danger .last,.rst-content .danger>:last-child,.rst-content .error .last,.rst-content .error>:last-child,.rst-content .hint .last,.rst-content .hint>:last-child,.rst-content .important .last,.rst-content .important>:last-child,.rst-content .note .last,.rst-content .note>:last-child,.rst-content .seealso 
.last,.rst-content .seealso>:last-child,.rst-content .tip .last,.rst-content .tip>:last-child,.rst-content .warning .last,.rst-content .warning>:last-child{margin-bottom:0}.rst-content .admonition-title:before{margin-right:4px}.rst-content .admonition table{border-color:rgba(0,0,0,.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:transparent!important;border-color:rgba(0,0,0,.1)!important}.rst-content .section ol.loweralpha,.rst-content .section ol.loweralpha>li,.rst-content .toctree-wrapper ol.loweralpha,.rst-content .toctree-wrapper ol.loweralpha>li,.rst-content section ol.loweralpha,.rst-content section ol.loweralpha>li{list-style:lower-alpha}.rst-content .section ol.upperalpha,.rst-content .section ol.upperalpha>li,.rst-content .toctree-wrapper ol.upperalpha,.rst-content .toctree-wrapper ol.upperalpha>li,.rst-content section ol.upperalpha,.rst-content section ol.upperalpha>li{list-style:upper-alpha}.rst-content .section ol li>*,.rst-content .section ul li>*,.rst-content .toctree-wrapper ol li>*,.rst-content .toctree-wrapper ul li>*,.rst-content section ol li>*,.rst-content section ul li>*{margin-top:12px;margin-bottom:12px}.rst-content .section ol li>:first-child,.rst-content .section ul li>:first-child,.rst-content .toctree-wrapper ol li>:first-child,.rst-content .toctree-wrapper ul li>:first-child,.rst-content section ol li>:first-child,.rst-content section ul li>:first-child{margin-top:0}.rst-content .section ol li>p,.rst-content .section ol li>p:last-child,.rst-content .section ul li>p,.rst-content .section ul li>p:last-child,.rst-content .toctree-wrapper ol li>p,.rst-content .toctree-wrapper ol li>p:last-child,.rst-content .toctree-wrapper ul li>p,.rst-content .toctree-wrapper ul li>p:last-child,.rst-content section ol li>p,.rst-content section ol li>p:last-child,.rst-content section ul li>p,.rst-content section ul li>p:last-child{margin-bottom:12px}.rst-content .section ol li>p:only-child,.rst-content .section ol li>p:only-child:last-child,.rst-content .section ul li>p:only-child,.rst-content .section ul li>p:only-child:last-child,.rst-content .toctree-wrapper ol li>p:only-child,.rst-content .toctree-wrapper ol li>p:only-child:last-child,.rst-content .toctree-wrapper ul li>p:only-child,.rst-content .toctree-wrapper ul li>p:only-child:last-child,.rst-content section ol li>p:only-child,.rst-content section ol li>p:only-child:last-child,.rst-content section ul li>p:only-child,.rst-content section ul li>p:only-child:last-child{margin-bottom:0}.rst-content .section ol li>ol,.rst-content .section ol li>ul,.rst-content .section ul li>ol,.rst-content .section ul li>ul,.rst-content .toctree-wrapper ol li>ol,.rst-content .toctree-wrapper ol li>ul,.rst-content .toctree-wrapper ul li>ol,.rst-content .toctree-wrapper ul li>ul,.rst-content section ol li>ol,.rst-content section ol li>ul,.rst-content section ul li>ol,.rst-content section ul li>ul{margin-bottom:12px}.rst-content .section ol.simple li>*,.rst-content .section ol.simple li ol,.rst-content .section ol.simple li ul,.rst-content .section ul.simple li>*,.rst-content .section ul.simple li ol,.rst-content .section ul.simple li ul,.rst-content .toctree-wrapper ol.simple li>*,.rst-content .toctree-wrapper ol.simple li ol,.rst-content .toctree-wrapper ol.simple li ul,.rst-content .toctree-wrapper ul.simple li>*,.rst-content .toctree-wrapper ul.simple li ol,.rst-content .toctree-wrapper ul.simple li ul,.rst-content section ol.simple li>*,.rst-content section ol.simple li ol,.rst-content section ol.simple li 
ul,.rst-content section ul.simple li>*,.rst-content section ul.simple li ol,.rst-content section ul.simple li ul{margin-top:0;margin-bottom:0}.rst-content .line-block{margin-left:0;margin-bottom:24px;line-height:24px}.rst-content .line-block .line-block{margin-left:24px;margin-bottom:0}.rst-content .topic-title{font-weight:700;margin-bottom:12px}.rst-content .toc-backref{color:#404040}.rst-content .align-right{float:right;margin:0 0 24px 24px}.rst-content .align-left{float:left;margin:0 24px 24px 0}.rst-content .align-center{margin:auto}.rst-content .align-center:not(table){display:block}.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink{opacity:0;font-size:14px;font-family:FontAwesome;margin-left:.5em}.rst-content .code-block-caption .headerlink:focus,.rst-content .code-block-caption:hover .headerlink,.rst-content .eqno .headerlink:focus,.rst-content .eqno:hover .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink:focus,.rst-content .toctree-wrapper>p.caption:hover .headerlink,.rst-content dl dt .headerlink:focus,.rst-content dl dt:hover .headerlink,.rst-content h1 .headerlink:focus,.rst-content h1:hover .headerlink,.rst-content h2 .headerlink:focus,.rst-content h2:hover .headerlink,.rst-content h3 .headerlink:focus,.rst-content h3:hover .headerlink,.rst-content h4 .headerlink:focus,.rst-content h4:hover .headerlink,.rst-content h5 .headerlink:focus,.rst-content h5:hover .headerlink,.rst-content h6 .headerlink:focus,.rst-content h6:hover .headerlink,.rst-content p.caption .headerlink:focus,.rst-content p.caption:hover .headerlink,.rst-content p .headerlink:focus,.rst-content p:hover .headerlink,.rst-content table>caption .headerlink:focus,.rst-content table>caption:hover .headerlink{opacity:1}.rst-content p a{overflow-wrap:anywhere}.rst-content .wy-table td p,.rst-content .wy-table td ul,.rst-content .wy-table th p,.rst-content .wy-table th ul,.rst-content table.docutils td p,.rst-content table.docutils td ul,.rst-content table.docutils th p,.rst-content table.docutils th ul,.rst-content table.field-list td p,.rst-content table.field-list td ul,.rst-content table.field-list th p,.rst-content table.field-list th ul{font-size:inherit}.rst-content .btn:focus{outline:2px solid}.rst-content table>caption .headerlink:after{font-size:12px}.rst-content .centered{text-align:center}.rst-content .sidebar{float:right;width:40%;display:block;margin:0 0 24px 24px;padding:24px;background:#f3f6f6;border:1px solid #e1e4e5}.rst-content .sidebar dl,.rst-content .sidebar p,.rst-content .sidebar ul{font-size:90%}.rst-content .sidebar .last,.rst-content .sidebar>:last-child{margin-bottom:0}.rst-content .sidebar .sidebar-title{display:block;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif;font-weight:700;background:#e1e4e5;padding:6px 12px;margin:-24px -24px 24px;font-size:100%}.rst-content .highlighted{background:#f1c40f;box-shadow:0 0 0 2px #f1c40f;display:inline;font-weight:700}.rst-content .citation-reference,.rst-content .footnote-reference{vertical-align:baseline;position:relative;top:-.4em;line-height:0;font-size:90%}.rst-content .citation-reference>span.fn-bracket,.rst-content 
.footnote-reference>span.fn-bracket{display:none}.rst-content .hlist{width:100%}.rst-content dl dt span.classifier:before{content:" : "}.rst-content dl dt span.classifier-delimiter{display:none!important}html.writer-html4 .rst-content table.docutils.citation,html.writer-html4 .rst-content table.docutils.footnote{background:none;border:none}html.writer-html4 .rst-content table.docutils.citation td,html.writer-html4 .rst-content table.docutils.citation tr,html.writer-html4 .rst-content table.docutils.footnote td,html.writer-html4 .rst-content table.docutils.footnote tr{border:none;background-color:transparent!important;white-space:normal}html.writer-html4 .rst-content table.docutils.citation td.label,html.writer-html4 .rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{display:grid;grid-template-columns:auto minmax(80%,95%)}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{display:inline-grid;grid-template-columns:max-content auto}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{display:grid;grid-template-columns:auto auto minmax(.65rem,auto) minmax(40%,95%)}html.writer-html5 .rst-content aside.citation>span.label,html.writer-html5 .rst-content aside.footnote>span.label,html.writer-html5 .rst-content div.citation>span.label{grid-column-start:1;grid-column-end:2}html.writer-html5 .rst-content aside.citation>span.backrefs,html.writer-html5 .rst-content aside.footnote>span.backrefs,html.writer-html5 .rst-content div.citation>span.backrefs{grid-column-start:2;grid-column-end:3;grid-row-start:1;grid-row-end:3}html.writer-html5 .rst-content aside.citation>p,html.writer-html5 .rst-content aside.footnote>p,html.writer-html5 .rst-content div.citation>p{grid-column-start:4;grid-column-end:5}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{margin-bottom:24px}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{padding-left:1rem}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dd,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dd,html.writer-html5 .rst-content dl.footnote>dt{margin-bottom:0}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{font-size:.9rem}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.footnote>dt{margin:0 .5rem .5rem 0;line-height:1.2rem;word-break:break-all;font-weight:400}html.writer-html5 .rst-content dl.citation>dt>span.brackets:before,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:before{content:"["}html.writer-html5 .rst-content dl.citation>dt>span.brackets:after,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:after{content:"]"}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a,html.writer-html5 
.rst-content dl.footnote>dt>span.fn-backref>a{word-break:keep-all}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a:not(:first-child):before,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.footnote>dd{margin:0 0 .5rem;line-height:1.2rem}html.writer-html5 .rst-content dl.citation>dd p,html.writer-html5 .rst-content dl.footnote>dd p{font-size:.9rem}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{padding-left:1rem;padding-right:1rem;font-size:.9rem;line-height:1.2rem}html.writer-html5 .rst-content aside.citation p,html.writer-html5 .rst-content aside.footnote p,html.writer-html5 .rst-content div.citation p{font-size:.9rem;line-height:1.2rem;margin-bottom:12px}html.writer-html5 .rst-content aside.citation span.backrefs,html.writer-html5 .rst-content aside.footnote span.backrefs,html.writer-html5 .rst-content div.citation span.backrefs{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content aside.citation span.backrefs>a,html.writer-html5 .rst-content aside.footnote span.backrefs>a,html.writer-html5 .rst-content div.citation span.backrefs>a{word-break:keep-all}html.writer-html5 .rst-content aside.citation span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content aside.footnote span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content div.citation span.backrefs>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content aside.citation span.label,html.writer-html5 .rst-content aside.footnote span.label,html.writer-html5 .rst-content div.citation span.label{line-height:1.2rem}html.writer-html5 .rst-content aside.citation-list,html.writer-html5 .rst-content aside.footnote-list,html.writer-html5 .rst-content div.citation-list{margin-bottom:24px}html.writer-html5 .rst-content dl.option-list kbd{font-size:.9rem}.rst-content table.docutils.footnote,html.writer-html4 .rst-content table.docutils.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content aside.footnote-list aside.footnote,html.writer-html5 .rst-content div.citation-list>div.citation,html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{color:grey}.rst-content table.docutils.footnote code,.rst-content table.docutils.footnote tt,html.writer-html4 .rst-content table.docutils.citation code,html.writer-html4 .rst-content table.docutils.citation tt,html.writer-html5 .rst-content aside.footnote-list aside.footnote code,html.writer-html5 .rst-content aside.footnote-list aside.footnote tt,html.writer-html5 .rst-content aside.footnote code,html.writer-html5 .rst-content aside.footnote tt,html.writer-html5 .rst-content div.citation-list>div.citation code,html.writer-html5 .rst-content div.citation-list>div.citation tt,html.writer-html5 .rst-content dl.citation code,html.writer-html5 .rst-content dl.citation tt,html.writer-html5 .rst-content dl.footnote code,html.writer-html5 .rst-content dl.footnote tt{color:#555}.rst-content .wy-table-responsive.citation,.rst-content .wy-table-responsive.footnote{margin-bottom:0}.rst-content .wy-table-responsive.citation+:not(.citation),.rst-content .wy-table-responsive.footnote+:not(.footnote){margin-top:24px}.rst-content .wy-table-responsive.citation:last-child,.rst-content 
.wy-table-responsive.footnote:last-child{margin-bottom:24px}.rst-content table.docutils th{border-color:#e1e4e5}html.writer-html5 .rst-content table.docutils th{border:1px solid #e1e4e5}html.writer-html5 .rst-content table.docutils td>p,html.writer-html5 .rst-content table.docutils th>p{line-height:1rem;margin-bottom:0;font-size:.9rem}.rst-content table.docutils td .last,.rst-content table.docutils td .last>:last-child{margin-bottom:0}.rst-content table.field-list,.rst-content table.field-list td{border:none}.rst-content table.field-list td p{line-height:inherit}.rst-content table.field-list td>strong{display:inline-block}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left}.rst-content code,.rst-content tt{color:#000;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;padding:2px 5px}.rst-content code big,.rst-content code em,.rst-content tt big,.rst-content tt em{font-size:100%!important;line-height:normal}.rst-content code.literal,.rst-content tt.literal{color:#e74c3c;white-space:normal}.rst-content code.xref,.rst-content tt.xref,a .rst-content code,a .rst-content tt{font-weight:700;color:#404040;overflow-wrap:normal}.rst-content kbd,.rst-content pre,.rst-content samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace}.rst-content a code,.rst-content a tt{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px}.rst-content dl ol,.rst-content dl p,.rst-content dl table,.rst-content dl ul{margin-bottom:12px}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}.rst-content dl dd>ol:last-child,.rst-content dl dd>p:last-child,.rst-content dl dd>table:last-child,.rst-content dl dd>ul:last-child{margin-bottom:0}html.writer-html4 .rst-content dl:not(.docutils),html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple){margin-bottom:24px}html.writer-html4 .rst-content dl:not(.docutils)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{display:table;margin:6px 0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:3px solid #6ab0de;padding:6px;position:relative}html.writer-html4 .rst-content dl:not(.docutils)>dt:before,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:before{color:#6ab0de}html.writer-html4 .rst-content dl:not(.docutils)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{margin-bottom:6px;border:none;border-left:3px solid #ccc;background:#f0f0f0;color:#555}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink,html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils)>dt:first-child,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:first-child{margin-top:0}html.writer-html4 .rst-content dl:not(.docutils) code.descclassname,html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descclassname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{background-color:transparent;border:none;padding:0;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .optional,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .property,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .property{display:inline-block;padding-right:8px;max-width:100%}html.writer-html4 .rst-content dl:not(.docutils) .k,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .k{font-style:italic}html.writer-html4 .rst-content dl:not(.docutils) .descclassname,html.writer-html4 .rst-content dl:not(.docutils) .descname,html.writer-html4 .rst-content dl:not(.docutils) .sig-name,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .sig-name{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#000}.rst-content .viewcode-back,.rst-content .viewcode-link{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content 
code.download,.rst-content tt.download{background:inherit;padding:inherit;font-weight:400;font-family:inherit;font-size:inherit;color:inherit;border:inherit;white-space:inherit}.rst-content code.download span:first-child,.rst-content tt.download span:first-child{-webkit-font-smoothing:subpixel-antialiased}.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{margin-right:4px}.rst-content .guilabel{border:1px solid #7fbbe3;background:#e7f2fa;font-size:80%;font-weight:700;border-radius:4px;padding:2.4px 6px;margin:auto 2px}.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>.kbd,.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>kbd{color:inherit;font-size:80%;background-color:#fff;border:1px solid #a6a6a6;border-radius:4px;box-shadow:0 2px grey;padding:2.4px 6px;margin:auto 0}.rst-content .versionmodified{font-style:italic}@media screen and (max-width:480px){.rst-content .sidebar{width:100%}}span[id*=MathJax-Span]{color:#404040}.math{text-align:center}@font-face{font-family:Lato;src:url(fonts/lato-normal.woff2?bd03a2cc277bbbc338d464e679fe9942) format("woff2"),url(fonts/lato-normal.woff?27bd77b9162d388cb8d4c4217c7c5e2a) format("woff");font-weight:400;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold.woff2?cccb897485813c7c256901dbca54ecf2) format("woff2"),url(fonts/lato-bold.woff?d878b6c29b10beca227e9eef4246111b) format("woff");font-weight:700;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold-italic.woff2?0b6bb6725576b072c5d0b02ecdd1900d) format("woff2"),url(fonts/lato-bold-italic.woff?9c7e4e9eb485b4a121c760e61bc3707c) format("woff");font-weight:700;font-style:italic;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-normal-italic.woff2?4eb103b4d12be57cb1d040ed5e162e9d) format("woff2"),url(fonts/lato-normal-italic.woff?f28f2d6482446544ef1ea1ccc6dd5892) format("woff");font-weight:400;font-style:italic;font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:400;src:url(fonts/Roboto-Slab-Regular.woff2?7abf5b8d04d26a2cafea937019bca958) format("woff2"),url(fonts/Roboto-Slab-Regular.woff?c1be9284088d487c5e3ff0a10a92e58c) format("woff");font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:700;src:url(fonts/Roboto-Slab-Bold.woff2?9984f4a9bda09be08e83f2506954adbe) format("woff2"),url(fonts/Roboto-Slab-Bold.woff?bed5564a116b05148e3b3bea6fb1162a) format("woff");font-display:block} diff --git a/css/theme_extra.css b/css/theme_extra.css new file mode 100644 index 00000000..ab0631a1 --- /dev/null +++ b/css/theme_extra.css @@ -0,0 +1,197 @@ +/* + * Wrap inline code samples otherwise they shoot of the side and + * can't be read at all. + * + * https://github.com/mkdocs/mkdocs/issues/313 + * https://github.com/mkdocs/mkdocs/issues/233 + * https://github.com/mkdocs/mkdocs/issues/834 + */ +.rst-content code { + white-space: pre-wrap; + word-wrap: break-word; + padding: 2px 5px; +} + +/** + * Make code blocks display as blocks and give them the appropriate + * font size and padding. 
+ * + * https://github.com/mkdocs/mkdocs/issues/855 + * https://github.com/mkdocs/mkdocs/issues/834 + * https://github.com/mkdocs/mkdocs/issues/233 + */ +.rst-content pre code { + white-space: pre; + word-wrap: normal; + display: block; + padding: 12px; + font-size: 12px; +} + +/** + * Fix code colors + * + * https://github.com/mkdocs/mkdocs/issues/2027 + */ +.rst-content code { + color: #E74C3C; +} + +.rst-content pre code { + color: #000; + background: #f8f8f8; +} + +/* + * Fix link colors when the link text is inline code. + * + * https://github.com/mkdocs/mkdocs/issues/718 + */ +a code { + color: #2980B9; +} +a:hover code { + color: #3091d1; +} +a:visited code { + color: #9B59B6; +} + +/* + * The CSS classes from highlight.js seem to clash with the + * ReadTheDocs theme causing some code to be incorrectly made + * bold and italic. + * + * https://github.com/mkdocs/mkdocs/issues/411 + */ +pre .cs, pre .c { + font-weight: inherit; + font-style: inherit; +} + +/* + * Fix some issues with the theme and non-highlighted code + * samples. Without and highlighting styles attached the + * formatting is broken. + * + * https://github.com/mkdocs/mkdocs/issues/319 + */ +.rst-content .no-highlight { + display: block; + padding: 0.5em; + color: #333; +} + + +/* + * Additions specific to the search functionality provided by MkDocs + */ + +.search-results { + margin-top: 23px; +} + +.search-results article { + border-top: 1px solid #E1E4E5; + padding-top: 24px; +} + +.search-results article:first-child { + border-top: none; +} + +form .search-query { + width: 100%; + border-radius: 50px; + padding: 6px 12px; + border-color: #D1D4D5; +} + +/* + * Improve inline code blocks within admonitions. + * + * https://github.com/mkdocs/mkdocs/issues/656 + */ + .rst-content .admonition code { + color: #404040; + border: 1px solid #c7c9cb; + border: 1px solid rgba(0, 0, 0, 0.2); + background: #f8fbfd; + background: rgba(255, 255, 255, 0.7); +} + +/* + * Account for wide tables which go off the side. + * Override borders to avoid weirdness on narrow tables. + * + * https://github.com/mkdocs/mkdocs/issues/834 + * https://github.com/mkdocs/mkdocs/pull/1034 + */ +.rst-content .section .docutils { + width: 100%; + overflow: auto; + display: block; + border: none; +} + +td, th { + border: 1px solid #e1e4e5 !important; + border-collapse: collapse; +} + +/* + * Without the following amendments, the navigation in the theme will be + * slightly cut off. This is due to the fact that the .wy-nav-side has a + * padding-bottom of 2em, which must not necessarily align with the font-size of + * 90 % on the .rst-current-version container, combined with the padding of 12px + * above and below. These amendments fix this in two steps: First, make sure the + * .rst-current-version container has a fixed height of 40px, achieved using + * line-height, and then applying a padding-bottom of 40px to this container. In + * a second step, the items within that container are re-aligned using flexbox. + * + * https://github.com/mkdocs/mkdocs/issues/2012 + */ + .wy-nav-side { + padding-bottom: 40px; +} + +/* For section-index only */ +.wy-menu-vertical .current-section p { + background-color: #e3e3e3; + color: #404040; +} + +/* + * The second step of above amendment: Here we make sure the items are aligned + * correctly within the .rst-current-version container. 
Using flexbox, we + * achieve it in such a way that it will look like the following: + * + * [No repo_name] + * Next >> // On the first page + * << Previous Next >> // On all subsequent pages + * + * [With repo_name] + * Next >> // On the first page + * << Previous Next >> // On all subsequent pages + * + * https://github.com/mkdocs/mkdocs/issues/2012 + */ +.rst-versions .rst-current-version { + padding: 0 12px; + display: flex; + font-size: initial; + justify-content: space-between; + align-items: center; + line-height: 40px; +} + +/* + * Please note that this amendment also involves removing certain inline-styles + * from the file ./mkdocs/themes/readthedocs/versions.html. + * + * https://github.com/mkdocs/mkdocs/issues/2012 + */ +.rst-current-version span { + flex: 1; + text-align: center; +} diff --git a/filesystem/index.html b/filesystem/index.html new file mode 100644 index 00000000..da18f9ae --- /dev/null +++ b/filesystem/index.html @@ -0,0 +1,303 @@ + + + + + + + + The File System - Introduction to Kebnekaise + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +

The File System

+
+

Objectives

+
    +
  • Learn about the file system on Kebnekaise
  • +
  • Find the project storage for this course and create your own subdirectory
  • +
+
+

Overview

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
|                            | Project storage                                               | $HOME               | /scratch           |
|----------------------------|---------------------------------------------------------------|---------------------|--------------------|
| Recommended for batch jobs | Yes                                                           | No (size)           | Yes                |
| Backed up                  | No                                                            | Yes                 | No                 |
| Accessible by batch system | Yes                                                           | Yes                 | Yes (node only)    |
| Performance                | High                                                          | High                | Medium             |
| Default readability        | Group only                                                    | Owner               | Owner              |
| Permissions management     | chmod, chgrp, ACL                                             | chmod, chgrp, ACL   | N/A for batch jobs |
| Notes                      | Storage your group gets allocated through the storage projects | Your home-directory | Per node           |
+
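As the table notes, access permissions on the project storage and in your home directory are managed with the standard Unix tools. A minimal sketch of sharing a directory with your group (the directory and group names below are made-up examples, not taken from this course):

b-an01 [~]$ chgrp -R mygroup /proj/nobackup/mystorage/shared-data
b-an01 [~]$ chmod -R g+rX /proj/nobackup/mystorage/shared-data

This gives group members read access to the files and lets them enter the directories; finer-grained sharing can be handled with ACLs.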

$HOME

+

This is your home-directory (pointed to by the $HOME variable). It has a default quota limit of 25GB. Your home directory is backed up regularly.

+
+

Note

+

Since the home directory is quite small, it should not be used for most production jobs. These should instead be run from project storage directories.

+
+

To find the path to your home directory, either run pwd just after logging in, or do the following:

+
b-an01 [~/store]$ cd
+b-an01 [~]$ pwd
+/home/u/username
+b-an01 [~]$
+
+

Project storage

+

Project storage is where a project’s members have the majority of their storage. It is applied for through SUPR, as a storage project. While storage projects need to be applied for separately, they are usually linked to a compute project.

+

This is where you should keep your data and run your batch jobs from. It offers high performance when accessed from the nodes, making it suitable for data that will be accessed from parallel jobs, and your home directory (usually) has too little space anyway.

+

Project storage is located below /proj/nobackup/ in the directory name selected during the creation of the proposal.

+
+

Note

+

The project storage is not intended for permanent storage and there is NO BACKUP of /proj/nobackup.

+
+

Using project storage

+
    +
  • If you have a storage project, you should use that to run your jobs.
  • +
  • You (your PI) will either choose a directory name when you/they apply for the storage project or get the project id as default name.
  • +
  • The location of the storage project in the file system is /proj/nobackup/NAME-YOU-PICKED
  • +
  • Since the storage project is shared between all users of the project, you should go to that directory and create a subdirectory for your things, which you will then use.
      • For this course the storage is in /proj/nobackup/intro-hpc2n
    +
  • +
+
+

Exercise

+

Go to the course project storage and create a subdirectory for yourself.

+
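A minimal sketch of what this could look like (here “mydir” is just a placeholder; pick a name you will recognize, for example your username):

b-an01 [~]$ cd /proj/nobackup/intro-hpc2n
b-an01 [/proj/nobackup/intro-hpc2n]$ mkdir mydir
b-an01 [/proj/nobackup/intro-hpc2n]$ cd mydir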
+

Now is a good time to prepare the course material and download the exercises. The easiest way to do so is by cloning the whole intro-course repository from GitHub.

+
+

Exercise

+
    +
  1. Go to the subdirectory you created under /proj/nobackup/intro-hpc2n
  2. +
  3. Clone the repository for the course: git clone https://github.com/hpc2n/intro-course.git
  4. +
+

You will get a directory called intro-course. Below it you will find a directory called “exercises”, where the majority of the exercises for the batch system section are located.

+
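Putting the two exercises together, the whole sequence could look roughly like this (again with “mydir” as a placeholder for your own subdirectory):

b-an01 [~]$ cd /proj/nobackup/intro-hpc2n/mydir
b-an01 [mydir]$ git clone https://github.com/hpc2n/intro-course.git
b-an01 [mydir]$ cd intro-course/exercises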
+

Quota

+

The size of the storage depends on the allocation. There are small, medium, and large storage projects, each with their own requirements. You can read about this on SUPR. The quota limits apply to the project as a whole; there are no user-level quotas on that space.

+

/scratch

+

Our recommendation is that you use the project storage instead of /scratch when working on compute nodes or login nodes.

+

On the computers at HPC2N there is a directory called /scratch. It is a small local area, shared between the users of the node, that can be used for saving (temporary) files you create or need during your computations. Please do not leave files in /scratch that you do not need when you are not running jobs on the machine, and please make sure your job removes any temporary files it creates.

+
+

Note

+

When anybody needs more space than is available on /scratch, we will remove the oldest/largest files without notice.

+
+

More information about the file system, as well as about archiving and compressing files, can be found in the HPC2N documentation about File Systems.

+
+

Keypoints

+
    +
  • When you login to Kebnekaise, you will end up in your home-directory.
  • +
  • Your home-directory is in /home/u/username and is pointed to by the environment variable $HOME.
  • +
  • Your project storage is located in /proj/nobackup/NAME-YOU-PICKED
      +
    • For this course it is /proj/nobackup/intro-hpc2n.
    • +
    • The project storage is NOT backed up.
    • +
    +
  • +
  • You should run the batch jobs from your project storage.
  • +
+
+ +
+
+ +
+
+ +
+ +
+ +
+ + + + « Previous + + + Next » + + +
+ + + + + + + + + + + diff --git a/images/24-word-cloud-hpc2n-med-proj.png b/images/24-word-cloud-hpc2n-med-proj.png new file mode 100644 index 00000000..0b106c44 Binary files /dev/null and b/images/24-word-cloud-hpc2n-med-proj.png differ diff --git a/images/A100-allocation.png b/images/A100-allocation.png new file mode 100644 index 00000000..f1c4163e Binary files /dev/null and b/images/A100-allocation.png differ diff --git a/images/Allocation-Kebnekaise-GPU_v3.png b/images/Allocation-Kebnekaise-GPU_v3.png new file mode 100644 index 00000000..26ad1735 Binary files /dev/null and b/images/Allocation-Kebnekaise-GPU_v3.png differ diff --git a/images/Allocation-Kebnekaise-largemem_v3.png b/images/Allocation-Kebnekaise-largemem_v3.png new file mode 100644 index 00000000..9c518d74 Binary files /dev/null and b/images/Allocation-Kebnekaise-largemem_v3.png differ diff --git a/images/Allocation-Kebnekaise-thin_skylake.png b/images/Allocation-Kebnekaise-thin_skylake.png new file mode 100644 index 00000000..d77a14e1 Binary files /dev/null and b/images/Allocation-Kebnekaise-thin_skylake.png differ diff --git a/images/Allocation-Kebnekaise-thin_v3.png b/images/Allocation-Kebnekaise-thin_v3.png new file mode 100644 index 00000000..c91cc5c5 Binary files /dev/null and b/images/Allocation-Kebnekaise-thin_v3.png differ diff --git a/images/K80-GPUs.png b/images/K80-GPUs.png new file mode 100644 index 00000000..d52e98d9 Binary files /dev/null and b/images/K80-GPUs.png differ diff --git a/images/SNIC_logo.png b/images/SNIC_logo.png new file mode 100644 index 00000000..09153a9d Binary files /dev/null and b/images/SNIC_logo.png differ diff --git a/images/SNIC_logo_lower.png b/images/SNIC_logo_lower.png new file mode 100644 index 00000000..0bdb6a20 Binary files /dev/null and b/images/SNIC_logo_lower.png differ diff --git a/images/SciLifeLab.png b/images/SciLifeLab.png new file mode 100644 index 00000000..973e3daf Binary files /dev/null and b/images/SciLifeLab.png differ diff --git a/images/V100-allocation-new.png b/images/V100-allocation-new.png new file mode 100644 index 00000000..bd05830b Binary files /dev/null and b/images/V100-allocation-new.png differ diff --git a/images/VR-Logo-ENG-SVART-322x150.png b/images/VR-Logo-ENG-SVART-322x150.png new file mode 100644 index 00000000..c432a3c2 Binary files /dev/null and b/images/VR-Logo-ENG-SVART-322x150.png differ diff --git a/images/WLCG-logo.png b/images/WLCG-logo.png new file mode 100644 index 00000000..2be61edf Binary files /dev/null and b/images/WLCG-logo.png differ diff --git a/images/algoryx.png b/images/algoryx.png new file mode 100644 index 00000000..60ebe274 Binary files /dev/null and b/images/algoryx.png differ diff --git a/images/choose.png b/images/choose.png new file mode 100644 index 00000000..799da2f0 Binary files /dev/null and b/images/choose.png differ diff --git a/images/choose2.png b/images/choose2.png new file mode 100644 index 00000000..6cd0585e Binary files /dev/null and b/images/choose2.png differ diff --git a/images/default-dir.png b/images/default-dir.png new file mode 100644 index 00000000..c2205a5b Binary files /dev/null and b/images/default-dir.png differ diff --git a/images/default-storage.png b/images/default-storage.png new file mode 100644 index 00000000..2f60571b Binary files /dev/null and b/images/default-storage.png differ diff --git a/images/dm.pdf b/images/dm.pdf new file mode 100644 index 00000000..1880e5ba Binary files /dev/null and b/images/dm.pdf differ diff --git a/images/dm.png b/images/dm.png new file mode 100644 index 
00000000..409d7da4 Binary files /dev/null and b/images/dm.png differ diff --git a/images/dm.svg b/images/dm.svg new file mode 100644 index 00000000..79774770 --- /dev/null +++ b/images/dm.svg @@ -0,0 +1,688 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + + Network / Bus + + + + + + + + + + + + + + + + + + diff --git a/images/eiscat-logo5.png b/images/eiscat-logo5.png new file mode 100644 index 00000000..3db5242d Binary files /dev/null and b/images/eiscat-logo5.png differ diff --git a/images/eosc-nordic.png b/images/eosc-nordic.png new file mode 100644 index 00000000..df25c3c8 Binary files /dev/null and b/images/eosc-nordic.png differ diff --git a/images/essence.png b/images/essence.png new file mode 100644 index 00000000..420dc1c4 Binary files /dev/null and b/images/essence.png differ diff --git a/images/gedit.png b/images/gedit.png new file mode 100644 index 00000000..64bcfc18 Binary files /dev/null and b/images/gedit.png differ diff --git a/images/hpc.gif b/images/hpc.gif new file mode 100644 index 00000000..3210a401 Binary files /dev/null and b/images/hpc.gif differ diff --git a/images/hpc.svg b/images/hpc.svg new file mode 100644 index 00000000..b1e83f3e --- /dev/null +++ b/images/hpc.svg @@ -0,0 +1,321 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + HPC + + + supercomputer + high-end workstation + laptop + small computing cluster + + + memory / storage + performance + + + + diff --git a/images/hpc1.pdf b/images/hpc1.pdf new file mode 100644 index 00000000..5a2808e5 Binary files /dev/null and b/images/hpc1.pdf differ diff --git a/images/hpc2.pdf b/images/hpc2.pdf new file mode 100644 index 00000000..912053dd Binary files /dev/null and b/images/hpc2.pdf differ diff --git a/images/hpc2n-blue-text.png b/images/hpc2n-blue-text.png new file mode 100644 index 00000000..36818700 Binary files /dev/null and b/images/hpc2n-blue-text.png differ diff --git a/images/hpc2n-logo-text5.png b/images/hpc2n-logo-text5.png new file mode 100644 index 00000000..b5297a64 Binary files /dev/null and b/images/hpc2n-logo-text5.png differ diff --git a/images/hpc2n-no-text.png b/images/hpc2n-no-text.png new file mode 100644 index 00000000..8ef2fe35 Binary files /dev/null and b/images/hpc2n-no-text.png differ diff --git a/images/hpc2n.png b/images/hpc2n.png new file mode 100644 index 00000000..6b26048a Binary files /dev/null and b/images/hpc2n.png differ diff --git a/images/hpc3.pdf b/images/hpc3.pdf new file mode 100644 index 00000000..5d8020a9 Binary files /dev/null and b/images/hpc3.pdf differ diff --git a/images/hpc_faster.gif b/images/hpc_faster.gif new file mode 100644 index 00000000..8826e69b Binary files /dev/null and b/images/hpc_faster.gif differ diff --git a/images/irf.png b/images/irf.png new file mode 100644 index 00000000..67b120e2 Binary files /dev/null and b/images/irf.png differ diff --git a/images/kebnekaise.png b/images/kebnekaise.png new file mode 100644 index 00000000..99695c0d Binary files /dev/null and b/images/kebnekaise.png differ diff --git a/images/large-users.png b/images/large-users.png new file mode 100644 index 00000000..cc33fa8a Binary files /dev/null and b/images/large-users.png differ diff --git a/images/linked.png b/images/linked.png new file mode 100644 index 00000000..777d69d6 Binary files /dev/null and 
b/images/linked.png differ diff --git a/images/ltu.preview.png b/images/ltu.preview.png new file mode 100644 index 00000000..fd433711 Binary files /dev/null and b/images/ltu.preview.png differ diff --git a/images/medium-users.png b/images/medium-users.png new file mode 100644 index 00000000..d34c0baf Binary files /dev/null and b/images/medium-users.png differ diff --git a/images/memory.pdf b/images/memory.pdf new file mode 100644 index 00000000..4354e828 Binary files /dev/null and b/images/memory.pdf differ diff --git a/images/memory.png b/images/memory.png new file mode 100644 index 00000000..982fbab2 Binary files /dev/null and b/images/memory.png differ diff --git a/images/memory.svg b/images/memory.svg new file mode 100644 index 00000000..1cf204cd --- /dev/null +++ b/images/memory.svg @@ -0,0 +1,363 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + Distributed memory + Shared memory + + + supercomputer + high-end workstation + small computing cluster + compute node + laptop + GPU(s) + + + memory + performance + + + + diff --git a/images/mid_sweden_university.png b/images/mid_sweden_university.png new file mode 100644 index 00000000..fd33e0f4 Binary files /dev/null and b/images/mid_sweden_university.png differ diff --git a/images/ml-show-buildenv-v2.png b/images/ml-show-buildenv-v2.png new file mode 100644 index 00000000..925c5405 Binary files /dev/null and b/images/ml-show-buildenv-v2.png differ diff --git a/images/naiss.png b/images/naiss.png new file mode 100644 index 00000000..59b7c673 Binary files /dev/null and b/images/naiss.png differ diff --git a/images/nano.png b/images/nano.png new file mode 100644 index 00000000..f6218c2b Binary files /dev/null and b/images/nano.png differ diff --git a/images/permissions.png b/images/permissions.png new file mode 100644 index 00000000..83a1787b Binary files /dev/null and b/images/permissions.png differ diff --git a/images/prace.png b/images/prace.png new file mode 100644 index 00000000..5c191f07 Binary files /dev/null and b/images/prace.png differ diff --git a/images/putty-kebnekaise.png b/images/putty-kebnekaise.png new file mode 100644 index 00000000..0af8d9d2 Binary files /dev/null and b/images/putty-kebnekaise.png differ diff --git a/images/putty-login-kebnekaise.png b/images/putty-login-kebnekaise.png new file mode 100644 index 00000000..42d17391 Binary files /dev/null and b/images/putty-login-kebnekaise.png differ diff --git a/images/skills4eosc.png b/images/skills4eosc.png new file mode 100644 index 00000000..399e35b7 Binary files /dev/null and b/images/skills4eosc.png differ diff --git a/images/slu_new.png b/images/slu_new.png new file mode 100644 index 00000000..5dc67070 Binary files /dev/null and b/images/slu_new.png differ diff --git a/images/sm.pdf b/images/sm.pdf new file mode 100644 index 00000000..bd885028 Binary files /dev/null and b/images/sm.pdf differ diff --git a/images/sm.png b/images/sm.png new file mode 100644 index 00000000..e20b591e Binary files /dev/null and b/images/sm.png differ diff --git a/images/sm.svg b/images/sm.svg new file mode 100644 index 00000000..32b6b0f3 --- /dev/null +++ b/images/sm.svg @@ -0,0 +1,573 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + + diff --git a/images/software.png b/images/software.png new file mode 100644 index 00000000..f407d548 
Binary files /dev/null and b/images/software.png differ diff --git a/images/software.svg b/images/software.svg new file mode 100644 index 00000000..2b76ed04 --- /dev/null +++ b/images/software.svg @@ -0,0 +1 @@ +Created with Highcharts 5.0.14Core hour usage per software for Kebnekaise @ HPC2NPeriod: 2020-01-01 - 2020-12-31gromacs: 37.6 %gromacs: 37.6 %unclassified: 20.9 %unclassified: 20.9 %vasp: 20.5 %vasp: 20.5 %project_nobackup: 3.0 %project_nobackup: 3.0 %amber: 3.0 %amber: 3.0 %gaussian: 2.9 %gaussian: 2.9 %cp2k: 1.6 %cp2k: 1.6 %psi4: 1.3 %psi4: 1.3 %siesta: 1.3 %siesta: 1.3 %qsource: 1.0 %qsource: 1.0 %other: 6.9 %other: 6.9 %Highcharts.com \ No newline at end of file diff --git a/images/software_v2.png b/images/software_v2.png new file mode 100644 index 00000000..53f8de4d Binary files /dev/null and b/images/software_v2.png differ diff --git a/images/storage-members.png b/images/storage-members.png new file mode 100644 index 00000000..d47a4142 Binary files /dev/null and b/images/storage-members.png differ diff --git a/images/terminal.png b/images/terminal.png new file mode 100644 index 00000000..8aa7d91f Binary files /dev/null and b/images/terminal.png differ diff --git a/images/thinlinc.png b/images/thinlinc.png new file mode 100644 index 00000000..b41d34d8 Binary files /dev/null and b/images/thinlinc.png differ diff --git a/images/to-link.png b/images/to-link.png new file mode 100644 index 00000000..d7467370 Binary files /dev/null and b/images/to-link.png differ diff --git a/images/tree.png b/images/tree.png new file mode 100644 index 00000000..f25aa855 Binary files /dev/null and b/images/tree.png differ diff --git a/images/umu-logo-left-EN.png b/images/umu-logo-left-EN.png new file mode 100644 index 00000000..59aa8d83 Binary files /dev/null and b/images/umu-logo-left-EN.png differ diff --git a/images/umu-logotyp-EN.png b/images/umu-logotyp-EN.png new file mode 100644 index 00000000..8f617ce8 Binary files /dev/null and b/images/umu-logotyp-EN.png differ diff --git a/img/favicon.ico b/img/favicon.ico new file mode 100644 index 00000000..e85006a3 Binary files /dev/null and b/img/favicon.ico differ diff --git a/index.html b/index.html new file mode 100644 index 00000000..f4f55b26 --- /dev/null +++ b/index.html @@ -0,0 +1,296 @@ + + + + + + + + Introduction to Kebnekaise + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +

Welcome to the course: Introduction to Kebnekaise

+
+

This material

+

Here you will find the content of the workshop “Introduction to Kebnekaise”.

+

You can download the markdown files for the presentation as well as the exercises from https://github.com/hpc2n/intro-course

+
    +
  • Click the green “Code” button
      +
    • Either copy the url for the repo under HTTPS and do git clone https://github.com/hpc2n/intro-course.git in a terminal window
    • +
    • OR pick “Download zip” to get a zip file with the content.
    • +
    +
  • +
+

Some useful links:

+
    +
  • Documentation about Linux at HPC2N: https://docs.hpc2n.umu.se/tutorials/linuxguide/
  • +
  • Get started guide: https://docs.hpc2n.umu.se/tutorials/quickstart/
  • +
  • Documentation pages at HPC2N: https://docs.hpc2n.umu.se/
  • +
+
+
+

Prerequisites

+
    +
  • Basic knowledge about Linux (if you need a refresher, you could take the course “Introduction to Linux”, which runs immediately before this course; info and registration here: https://www.hpc2n.umu.se/events/courses/2024/fall/intro-linux).
  • +
  • An account at SUPR and at HPC2N. You should have already been contacted about getting these if you did not have them already.
  • +
+
+
+

Content

+
    +
  • This course aims to give a brief, but comprehensive introduction to Kebnekaise.
  • +
  • You will learn about
  • +
  • HPC2N, HPC, and Kebnekaise hardware
  • +
  • How to use our systems:
      +
    • Logging in & editors
    • +
    • The File System
    • +
    • The Module System
    • +
    • Compiling and linking
    • +
    • The Batch System
    • +
    +
  • +
  • Simple examples (batch system)
  • Application examples (batch system)

This course will consist of lectures and type-alongs, as well as a few exercises where you get to try out what you have just learned.
  • +
+
+

Instructors

+
    +
  • Birgitte Brydsö, HPC2N
  • +
  • Pedro Ojeda May, HPC2N
  • +
+

Preliminary schedule

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
| Time  | Topic                                | Activity                     |
|-------|--------------------------------------|------------------------------|
| 11:15 | Welcome+Syllabus                     |                              |
| 11:25 | Introduction to Kebnekaise and HPC2N | Lecture                      |
| 11:45 | Logging in & editors                 | Lecture+exercise             |
| 11:55 | The File System                      | Lecture+code along           |
| 12:15 | LUNCH BREAK                          |                              |
| 13:15 | The Module System                    | Lecture+code along           |
| 13:35 | Compiling, compiler tool chains      | Lecture+code along+exercise  |
| 13:50 | The Batch System                     | Lecture+code along           |
| 14:10 | Simple Examples                      | Lecture+exercises            |
| 14:45 | COFFEE BREAK                         |                              |
| 15:00 | Application Examples                 | Lecture+code along+exercises |
| 16:40 | Questions+Summary                    |                              |
| 17:00 | END OF COURSE                        |                              |
+ +
+
+ +
+
+ +
+ +
+ +
+ + + + + Next » + + +
+ + + + + + + + + + + + + diff --git a/intro/index.html b/intro/index.html new file mode 100644 index 00000000..7e60d37e --- /dev/null +++ b/intro/index.html @@ -0,0 +1,694 @@ + + + + + + + + Introduction to Kebnekaise and HPC2N - Introduction to Kebnekaise + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +

Introduction to HPC2N, Kebnekaise and HPC

+ + +

umu-logo +naiss-logo +hpc2n-logo

+ +

HPC2N

+
+

Note

+

High Performance Computing Center North (HPC2N) is

+
    +
  • a competence center for Scientific and Parallel Computing
  • +
  • part of National Academic Infrastructure for Super­computing in Sweden (NAISS)
  • +
+
+

HPC2N provides state-of-the-art resources and expertise:

+
    +
  • Scalable and parallel HPC
  • +
  • Large-scale storage facilities (Project storage (Lustre), SweStore, Tape)
  • +
  • Grid and cloud computing (WLCG NT1, Swedish Science Cloud)
  • +
  • National Data Science Node in ”Epidemiology and Biology of Infections” (DDLS)
  • +
  • Software for e-Science applications
  • +
  • All levels of user support
      +
    • Primary, advanced, dedicated
    • +
    • Application Experts (AEs)
    • +
    +
  • +
+
+

Primary objective

+

To raise the national and local level of HPC competence and transfer HPC knowledge and technology to new users in academia and industry.

+
+

HPC2N partners

+

HPC2N is hosted by:

+

umu-logo

+



+

Partners:

+

irf-logo +ltu-logo +miun-logo +slu-logo

+


+

HPC2N funding and collaborations

+

Funded mainly by Umeå University, with contributions from the other HPC2N partners.

+

Involved in several projects and collaborations:

+

essence-logo +prace-logo +algoryx-logo

+

WLCG-logo +eosc-nordic-logo +eiscat-logo

+

scilifelab-logo +skills4eosc

+


+

HPC2N training and other services

+
    +
  • User support (primary, advanced, dedicated)
      +
    • Research group meetings @ UmU
    • +
    • Also at the partner sites
    • +
    • Online “HPC2N fika”
    • +
    +
  • +
  • User training and education program
      +
    • 0.5 – 5 days; ready-to-run exercises
    • +
    • Introduction to Linux, 16 September 2024
    • +
    • Introduction to HPC2N and Kebnekaise, 16 September 2024
    • +
    • Parallel programming and tools (OpenMP, MPI, debugging, perf. analyzers, Matlab, R, MD simulation, ML, GPU, …)
    • +
    • Basic Singularity, 16 October 2024
    • +
    • Introduction to running R, Python, Julia, and Matlab in HPC, 22-25 October 2024
    • +
    • Introduction to Git, 25-29 November 2024
    • +
    • Using Python in an HPC environment, 5-6 December 2024
    • +
    • Updated list: https://www.hpc2n.umu.se/events/courses
    • +
    +
  • +
  • Workshops and seminars
  • +
  • NGSSC / SeSE & university courses
  • +
+

HPC2N personnel

+

Management:

+
    +
  • Paolo Bientinesi, director
  • +
  • Björn Torkelsson, deputy director
  • +
  • Lena Hellman, administrator
  • +
+

Application experts:

+
    +
  • Jerry Eriksson
  • +
  • Pedro Ojeda May
  • +
  • Birgitte Brydsö
  • +
  • Åke Sandgren
  • +
+

Others:

+
    +
  • Mikael Rännar (WLCG coord)
  • +
  • Research Engineers under DDLS, HPC2N/SciLifeLab
      +
    • Paul Dulaud, System Developer, IT
    • +
    • Abdullah Aziz, Data Engineer
    • +
    • Data Steward
    • +
    +
  • +
+

System and support:

+
    +
  • Erik Andersson
  • +
  • Birgitte Brydsö
  • +
  • Niklas Edmundsson (Tape coord)
  • +
  • My Karlsson
  • +
  • Roger Oscarsson
  • +
  • Åke Sandgren
  • +
  • Mattias Wadenstein (NeIC, Tier1)
  • +
  • Lars Viklund
  • +
+

HPC2N application experts

+
    +
  • HPC2N provides advanced and dedicated support in the form of Application Experts (AEs):
      +
    • Jerry Eriksson: Profiling, Machine learning (DNN), MPI, OpenMP, OpenACC
    • +
    • Pedro Ojeda May: Molecular dynamics, Profiling, QM/MM, NAMD, Amber, Gromacs, GAUSSIAN, R, Python
    • +
    • Åke Sandgren: General high level programming assistance, VASP, Gromacs, Amber
    • +
    • Birgitte Brydsö: General HPC, R, Python
    • +
    +
  • +
  • Contact through regular support
  • +
+

HPC2N users by discipline

+
    +
  • Users from several scientific disciplines:
      +
    • Biosciences and medicine
    • +
    • Chemistry
    • +
    • Computing science
    • +
    • Engineering
    • +
    • Materials science
    • +
    • Mathematics and statistics
    • +
    • Physics including space physics
    • +
    • ML, DL, and other AI
    • +
    +
  • +
+

HPC2N users by discipline, largest users

+
    +
  • Users from several scientific disciplines:
      +
    • Biosciences and medicine
    • +
    • Chemistry
    • +
    • Computing science
    • +
    • Engineering
    • +
    • Materials science
    • +
    • Mathematics and statistics
    • +
    • Physics including space physics
    • +
    • Machine learning and artificial intelligence (several new projects)
    • +
    +
  • +
+

HPC2N users by software

+

software-users

+

Kebnekaise

+

The current supercomputer at HPC2N. It is a very heterogeneous system.

+
    +
  • Named after a massif (contains some of Sweden’s highest mountain peaks)
  • +
  • Kebnekaise was
      +
    • delivered by Lenovo and
    • +
    • installed during the summer 2016
    • +
    • Opened up for general availability on November 7, 2016
    • +
    • In 2018, Kebnekaise was extended with
        +
      • 52 Intel Xeon Gold 6132 (Skylake) nodes, as well as
      • +
      • 10 NVidia V100 (Volta) GPU nodes
      • +
      +
    • +
    • In 2023, Kebnekaise was extended with
        +
      • 2 dual NVIDIA A100 GPU nodes
      • +
      • one many-core AMD Zen3 CPU node
      • +
      +
    • +
    +
  • +
+

Kebnekaise will be continuously upgraded, as old hardware gets retired.

+
    +
  • In 2024 Kebnekaise was extended with
      +
    • 2 Dual socket GPU-nodes: Lenovo ThinkSystem SR675 V3
        +
      • 2 x AMD EPYC 9454 48C 290W 2.75GHz Processor
      • +
      • 768GB [24x 32GB TruDDR5 4800MHz RDIMM-A]
      • +
      • 1 x 3.84TB Read Intensive NVMe PCIe 4.0 x4 HS SSD
      • +
      • 1 x NVIDIA H100 SXM5 700W 80G HBM3 GPU Board
      • +
      +
    • +
    • 10 dual-socket GPU-nodes: ThinkSystem SR665 V3
        +
      • 2 x AMD EPYC 9254 24C 200W 2.9GHz Processor
      • +
      • 384GB [24x 16GB TruDDR5 4800MHz RDIMM-A]
      • +
      • 1 x 1.92TB Read Intensive NVMe PCIe 5.0 x4 HS SSD
      • +
      • 2 x NVIDIA L40S 48GB PCIe Gen4 Passive GPU
      • +
      +
    • +
    • 8 dual-socket CPU only: ThinkSystem SR645 V3
        +
      • 2 x AMD EPYC 9754 128C 360W 2.25GHz Processor
      • +
      • 768GB [24x 32GB TruDDR5 4800MHz RDIMM-A]
      • +
      • 1 x 3.84TB Read Intensive NVMe PCIe 4.0 x4 HS SSD
      • +
      +
    • +
    +
  • +
+

Current hardware in Kebnekaise

+

Kebnekaise has CPU-only, GPU-enabled, and large memory nodes.

+

The CPU-only nodes are:

+
    +
  • 2 x 14 core Intel broadwell
      +
    • 4460 MB memory / core
    • +
    • 48 nodes
    • +
    • Total of 41.6 TFlops/s
    • +
    +
  • +
  • 2 x 14 core Intel skylake
      +
    • 6785 MB memory / core
    • +
    • 52 nodes
    • +
    • Total of 87 TFlops/s
    • +
    +
  • +
  • 2 x 64 core AMD zen3
      +
    • 8020 MB / core
    • +
    • 1 node
    • +
    • Total of 11 TFlops/s
    • +
    +
  • +
  • 2 x 128 core AMD zen4
      +
    • 2516 MB / core
    • +
    • 8 nodes
    • +
    • Total of 216 TFlops/s
    • +
    +
  • +
+

The GPU enabled nodes are:

+
    +
  • 2 x 14 core Intel broadwell
      +
    • 9000 MB memory / core
    • +
    • 2 x Nvidia A40
    • +
    • 4 nodes
    • +
    • Total of 83 TFlops/s
    • +
    +
  • +
  • 2 x 14 core Intel skylake
      +
    • 6785 MB memory / core
    • +
    • 2 x Nvidia V100
    • +
    • 10 nodes
    • +
    • Total of 75 TFlops/s
    • +
    +
  • +
  • 2 x 24 core AMD zen3
      +
    • 10600 MB / core
    • +
    • 2 x Nvidia A100
    • +
    • 2 nodes
    • +
    +
  • +
  • 2 x 24 core AMD zen3
      +
    • 10600 MB / core
    • +
    • 2 x AMD MI100
    • +
    • 1 node
    • +
    +
  • +
  • 2 x 24 core AMD zen4
      +
    • 6630 MB / core
    • +
    • 2 x Nvidia A6000
    • +
    • 1 node
    • +
    +
  • +
  • 2 x 24 core AMD zen4
      +
    • 6630 MB / core
    • +
    • 2 x Nvidia L40s
    • +
    • 10 nodes
    • +
    +
  • +
  • 2 x 48 core AMD zen4
      +
    • 6630 MB / core
    • +
    • 4 x Nvidia H100 SXM5
    • +
    • 2 nodes
    • +
    +
  • +
+

The large memory nodes are:

+
    +
  • 4 x 18 core Intel broadwell
      +
    • 41666 MB memory / core
    • +
    • 8 nodes
    • +
    • Total of 13.6 TFlops/s for all these nodes
    • +
    +
  • +
+

Kebnekaise - HPC2N storage

+

Basically four types of storage are available at HPC2N:

+
    +
  • Home directory
      +
    • /home/X/Xyz, $HOME, ~
    • +
    • 25 GB, user owned
    • +
    +
  • +
  • Project storage
      +
    • /proj/nobackup/abc
    • +
    • Shared among project members
    • +
    +
  • +
  • Local scratch space
      +
    • $SNIC_TMP
    • +
    • SSD (170GB), per job, per node, “volatile”
    • +
    +
  • +
  • Tape Storage
      +
    • Backup
    • +
    • Long term storage
    • +
    +
  • +
+
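As a rough sketch of how the node-local scratch space ($SNIC_TMP) is typically used from within a batch job (job scripts are covered in the batch system section; the file and directory names below are made up for illustration):

# copy the program and input to the fast node-local disk, run there, copy results back
cp /proj/nobackup/mystorage/my_program /proj/nobackup/mystorage/input.dat $SNIC_TMP/
cd $SNIC_TMP
./my_program input.dat
cp results.dat /proj/nobackup/mystorage/

Since $SNIC_TMP is per job and “volatile”, anything left there disappears when the job ends, so results must be copied back to project storage before the job finishes.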

Also

+
    +
  • SweStore — disk based (dCache)
      +
    • Research Data Storage Infrastructure, for active research data and operated by NAISS, WLCG
    • +
    +
  • +
+

Kebnekaise - projects

+
+

Compute projects

+

To use Kebnekaise, you must be a member of a compute project.

+
    +
  • A compute project has a certain number of core hours allocated for it per month
  • +
  • A regular CPU core costs 1 core hour per hour; other resources (e.g., GPUs) cost more (see the example below)
  • +
  • This is not a hard limit, but projects that go over their allocation get lower priority
  • +
+
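As a simple illustration of the accounting (with made-up numbers, not an actual allocation): a job that runs on 28 regular CPU cores for 10 hours consumes 28 × 10 = 280 core hours of the project's monthly allocation, while a job that also uses GPUs is charged correspondingly more per hour.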
+

A compute project contains a certain amount of storage. If more storage is required, you must be a member of a storage project.

+
+

Note

+

As Kebnekaise is a local cluster, you need to be affiliated with UmU, IRF, SLU, Miun, or LTU to use it.

+
+

Projects are applied for through SUPR (https://supr.naiss.se).

+

I will cover this in more detail in a later section about HPC2N and Kebnekaise.

+

HPC

+
+

What is HPC?

+

High Performance Computing (definition)

+

“High Performance Computing most generally refers to the practice of aggregating computing power in a way that delivers much higher performance than one could get out of a typical desktop computer or workstation in order to solve large problems in science, engineering, or business.”

+

From: https://insidehpc.com/hpc-basic-training/what-is-hpc/

+
+

High Performance Computing - opening the definition

+

Aggregating computing power

+
    +
  • 147 nodes totalling 6808 cores and ??? CUDA cores
      +
    • Compared to 4 cores in a modern laptop
    • +
    +
  • +
+

Higher performance

+
    +
  • More than 527,000,000,000,000 arithmetical operations per second (527 trillion, or 527 billion in the long scale)
      +
    • Compared to 200,000,000,000 Flops in a modern laptop (200 billion, or 200 milliard in the long scale)
    • +
    +
  • +
+

Solve large problems

+
    +
  • When does a problem become large enough for HPC?
  • +
  • Are there other reasons for using HPC resources? (Memory, software, support, etc.)
  • +
+

High Performance Computing - large problems

+

A problem can be large for two main reasons:

+
    +
  • Execution time: The time required to form a solution to the problem is very long
  • +
  • Memory / storage use: The solution of the problem requires a lot of memory and/or storage
  • +
+

The former can be remedied by increasing the performance

+
    +
  • More cores, more nodes, GPUs, …
  • +
+

The latter by adding more memory / storage

+
    +
  • More memory per node (including large memory nodes), more nodes, …
      +
    • Kebnekaise: 128GB - 192GB, 384GB, 512GB, 768GB, 3TB
    • +
    +
  • +
  • Large storage solutions, …
  • +
+

High Performance Computing - what counts as HPC

+

hpc

+



+

High Performance Computing - other reasons

+
    +
  • Specialized (expensive) hardware
      +
    • GPUs, including those optimized for AI
        +
      • Kebnekaise has V100, A100, A40, MI100, A6000, L40S, H100
      • +
      +
    • +
    • High-end CPUs (AVX-512 etc) and ECC memory
    • +
    +
  • +
  • Software
      +
    • HPC2N holds licenses for several software packages
    • +
    • Software is pre-configured and ready-to-use
    • +
    +
  • +
  • Support and documentation
  • +
+

High Performance Computing - memory models

+

Two memory models are relevant for HPC:

+
    +
  • Shared memory: Single memory space for all data. +sm +

      +
    • Everyone can access the same data
    • +
    • Straightforward to use
    • +
    +
  • +
  • Distributed memory: Multiple distinct memory spaces. +dm +

      +
    • Everyone has direct access only to the local data
    • +
    • Requires communication
    • +
    +
  • +
+

memory +

+

High Performance Computing - programming models

+

The programming model changes when we aim for extra performance and/or memory:

+
    +
  • Single-core: Matlab, Python, C, Fortran, …
      +
    • Single stream of operations
    • +
    +
  • +
  • Multi-core: Vectorized Matlab, pthreads, OpenMP
      +
    • Multiple streams of operations
    • +
    • Work distribution, coordination (synchronization, etc), …
    • +
    +
  • +
  • Distributed memory: MPI, …
      +
    • Multiple streams of operations
    • +
    • Work distribution, coordination (synchronization, etc), …
    • +
    • Data distribution and communication
    • +
    +
  • +
  • GPUs: CUDA, OpenCL, OpenACC, OpenMP, …
      +
    • Many lightweight streams of operations
    • +
    • Work distribution, coordination (synchronization, etc), …
    • +
    • Data distribution across memory spaces and movement
    • +
    +
  • +
+

High Performance Computing - software

+

Complexity grows when we aim for extra performance and/or memory/storage:

+
    +
  1. Single-core: LAPACK, …
      +
    • Load correct toolchain etc
    • +
    +
  2. +
  3. Multi-core: LAPACK + parallel BLAS, …
      +
    • Load correct toolchain etc
    • +
    • Allocate correct number of cores, configure software to use correct number of cores, …
    • +
    +
  4. +
  5. Distributed memory: ScaLAPACK, …
      +
    • Load correct toolchain etc
    • +
    • Allocate correct number of nodes and cores, configure software to use correct number of nodes and cores, …
    • +
    • Data distribution, storage, …
    • +
    +
  6. +
  7. GPUs: MAGMA, TensorFlow, …
      +
    • Load correct toolchain etc
    • +
    • Allocate correct number of cores and GPUs, configure software to use correct number of cores and GPUs, …
    • +
    +
  8. +
+ +
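To make the recurring “load correct toolchain” step a bit more concrete: on systems that use environment modules, as Kebnekaise does (the module system is covered in a later section), it typically amounts to something like the following, where TOOLCHAIN/VERSION is a placeholder and not necessarily a module that exists on Kebnekaise:

b-an01 [~]$ module load TOOLCHAIN/VERSION
b-an01 [~]$ module list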
+
+ +
+
+ +
+ +
+ +
+ + + + « Previous + + + Next » + + +
+ + + + + + + + + + + diff --git a/js/html5shiv.min.js b/js/html5shiv.min.js new file mode 100644 index 00000000..1a01c94b --- /dev/null +++ b/js/html5shiv.min.js @@ -0,0 +1,4 @@ +/** +* @preserve HTML5 Shiv 3.7.3 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed +*/ +!function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="",k="hidden"in a,l=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document); diff --git a/js/jquery-3.6.0.min.js b/js/jquery-3.6.0.min.js new file mode 100644 index 00000000..c4c6022f --- /dev/null +++ b/js/jquery-3.6.0.min.js @@ -0,0 +1,2 @@ +/*! 
jQuery v3.6.0 | (c) OpenJS Foundation and other contributors | jquery.org/license */ +!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],r=Object.getPrototypeOf,s=t.slice,g=t.flat?function(e){return t.flat.call(e)}:function(e){return t.concat.apply([],e)},u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},x=function(e){return null!=e&&e===e.window},E=C.document,c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.6.0",S=function(e,t){return new S.fn.init(e,t)};function p(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp(F),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+F),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new RegExp("\\\\[\\da-fA-F]{1,6}"+M+"?|\\\\([^\\r\\n\\f])","g"),ne=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(p.childNodes),p.childNodes),t[p.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function se(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!N[t+" "]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&(U.test(t)||z.test(t))){(f=ee.test(t)&&ye(e.parentNode)||e)===e&&d.scope||((s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=S)),o=(l=h(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+xe(l[o]);c=l.join(",")}try{return 
H.apply(n,f.querySelectorAll(c)),n}catch(e){N(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return g(t.replace($,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function le(e){return e[S]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e&&e.namespaceURI,n=e&&(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:p;return r!=C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),p!=C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.scope=ce(function(e){return a.appendChild(e).appendChild(C.createElement("div")),"undefined"!=typeof e.querySelectorAll&&!e.querySelectorAll(":scope fieldset div").length}),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=S,!C.getElementsByName||!C.getElementsByName(S).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){var 
t;a.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+S+"-]").length||v.push("~="),(t=C.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||v.push("\\["+M+"*name"+M+"*="+M+"*(?:''|\"\")"),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+S+"+*").length||v.push(".#.+[+~]"),e.querySelectorAll("\\\f"),v.push("[\\r\\n\\f]")}),ce(function(e){e.innerHTML="";var t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",F)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},j=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e==C||e.ownerDocument==p&&y(p,e)?-1:t==C||t.ownerDocument==p&&y(p,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e==C?-1:t==C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]==p?-1:s[r]==p?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if(T(e),d.matchesSelector&&E&&!N[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){N(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=m[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&m(e,function(e){return t.test("string"==typeof 
e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,n,r){return m(n)?S.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?S.grep(e,function(e){return e===n!==r}):"string"!=typeof n?S.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(S.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||D,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:q.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof S?t[0]:t,S.merge(this,S.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),N.test(r[1])&&S.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(S):S.makeArray(e,this)}).prototype=S.fn,D=S(E);var L=/^(?:parents|prev(?:Until|All))/,H={children:!0,contents:!0,next:!0,prev:!0};function O(e,t){while((e=e[t])&&1!==e.nodeType);return e}S.fn.extend({has:function(e){var t=S(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i;ce=E.createDocumentFragment().appendChild(E.createElement("div")),(fe=E.createElement("input")).setAttribute("type","radio"),fe.setAttribute("checked","checked"),fe.setAttribute("name","t"),ce.appendChild(fe),y.checkClone=ce.cloneNode(!0).cloneNode(!0).lastChild.checked,ce.innerHTML="",y.noCloneChecked=!!ce.cloneNode(!0).lastChild.defaultValue,ce.innerHTML="",y.option=!!ce.lastChild;var ge={thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?S.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;n",""]);var me=/<|&#?\w+;/;function xe(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d\s*$/g;function je(e,t){return A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&S(e).children("tbody")[0]||e}function De(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function qe(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Le(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(Y.hasData(e)&&(s=Y.get(e).events))for(i in Y.remove(t,"handle events"),s)for(n=0,r=s[i].length;n").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var _t,zt=[],Ut=/(=)\?(?=&|$)|\?\?/;S.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=zt.pop()||S.expando+"_"+wt.guid++;return this[e]=!0,e}}),S.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Ut.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Ut.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Ut,"$1"+r):!1!==e.jsonp&&(e.url+=(Tt.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||S.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?S(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,zt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((_t=E.implementation.createHTMLDocument("").body).innerHTML="
",2===_t.childNodes.length),S.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=N.exec(e))?[t.createElement(i[1])]:(i=xe([e],t,o),o&&o.length&&S(o).remove(),S.merge([],i.childNodes)));var r,i,o},S.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(S.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},S.expr.pseudos.animated=function(t){return S.grep(S.timers,function(e){return t===e.elem}).length},S.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=S.css(e,"position"),c=S(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=S.css(e,"top"),u=S.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,S.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},S.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){S.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===S.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===S.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=S(e).offset()).top+=S.css(e,"borderTopWidth",!0),i.left+=S.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-S.css(r,"marginTop",!0),left:t.left-i.left-S.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===S.css(e,"position"))e=e.offsetParent;return e||re})}}),S.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;S.fn[t]=function(e){return $(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),S.each(["top","left"],function(e,n){S.cssHooks[n]=Fe(y.pixelPosition,function(e,t){if(t)return t=We(e,n),Pe.test(t)?S(e).position()[n]+"px":t})}),S.each({Height:"height",Width:"width"},function(a,s){S.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){S.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return $(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?S.css(e,t,i):S.style(e,t,n,i)},s,n?e:void 0,n)}})}),S.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){S.fn[t]=function(e){return this.on(t,e)}}),S.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return 
this.mouseenter(e).mouseleave(t||e)}}),S.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){S.fn[n]=function(e,t){return 0"),n("table.docutils.footnote").wrap("
"),n("table.docutils.citation").wrap("
"),n(".wy-menu-vertical ul").not(".simple").siblings("a").each((function(){var t=n(this);expand=n(''),expand.on("click",(function(n){return e.toggleCurrent(t),n.stopPropagation(),!1})),t.prepend(expand)}))},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),t=e.find('[href="'+n+'"]');if(0===t.length){var i=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(t=e.find('[href="#'+i.attr("id")+'"]')).length&&(t=e.find('[href="#"]'))}if(t.length>0){$(".wy-menu-vertical .current").removeClass("current").attr("aria-expanded","false"),t.addClass("current").attr("aria-expanded","true"),t.closest("li.toctree-l1").parent().addClass("current").attr("aria-expanded","true");for(let n=1;n<=10;n++)t.closest("li.toctree-l"+n).addClass("current").attr("aria-expanded","true");t[0].scrollIntoView()}}catch(n){console.log("Error expanding nav for anchor",n)}},onScroll:function(){this.winScroll=!1;var n=this.win.scrollTop(),e=n+this.winHeight,t=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(t),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",(function(){this.linkScroll=!1}))},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current").attr("aria-expanded","false"),e.siblings().find("li.current").removeClass("current").attr("aria-expanded","false");var t=e.find("> ul li");t.length&&(t.removeClass("current").attr("aria-expanded","false"),e.toggleClass("current").attr("aria-expanded",(function(n,e){return"true"==e?"false":"true"})))}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:n.exports.ThemeNav,StickyNav:n.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],t=0;t + + + + + + + Logging in - Introduction to Kebnekaise + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +

Logging in

+

When you have your account, you can log in to Kebnekaise. This can be done with any number of SSH clients or with ThinLinc (the easiest option if you need a graphical interface).

+
+

Objectives

+
    +
  • Log in to Kebnekaise, either with ThinLinc or your SSH client of choice.
  • +
+
+

Kebnekaise login servers

+
+

Note

+
    +
  • The main login node of Kebnekaise: kebnekaise.hpc2n.umu.se
  • +
  • ThinLinc login node: kebnekaise-tl.hpc2n.umu.se
      +
    • ThinLinc through a browser (fewer features): https://kebnekaise-tl.hpc2n.umu.se:300/
    • +
    +
  • +
+
+

In addition, there is a login node for the AMD-based nodes (we will talk more about this later): kebnekaise-amd.hpc2n.umu.se. For ThinLinc access: kebnekaise-amd-tl.hpc2n.umu.se

+
+
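
If you use a plain SSH client instead, logging in from a terminal could look like this (a minimal sketch; replace username with your own HPC2N username, and pick one of the login node addresses listed above):

ssh username@kebnekaise.hpc2n.umu.se
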

ThinLinc is recommended for this course

+

ThinLinc: a cross-platform remote desktop server from Cendio AB. Especially useful when you need software with a graphical interface.

+

This is what we recommend you use for this course, unless you have a preferred SSH client.

+
+

Using ThinLinc

+
    +
  • Download the client from https://www.cendio.com/thinlinc/download. Install it.
  • +
  • Start the client. Enter the name of the server: kebnekaise-tl.hpc2n.umu.se. Enter your username. +
    +thinlinc +
  • +
  • Go to “Options” -> “Security”. Check that authentication method is set to password.
  • +
  • Go to “Options” -> “Screen”. Uncheck “Full screen mode”.
  • +
  • Enter your HPC2N password. Click “Connect”
  • +
  • Click “Continue” when you are told that the server’s host key is not in the registry. Wait for the ThinLinc desktop to open.
  • +
+
+

Exercise

+

Log in to Kebnekaise.

+
    +
  • If you are using ThinLinc, first install the ThinLinc client. If you are using another SSH client, install it first if you have not already done so.
  • +
+
+

Change password

+

You get your first, temporary HPC2N password from this page: HPC2N passwords.

+

That page can also be used to reset your HPC2N password if you have forgotten it.

+

Note that you are authenticating through SUPR, using that service’s login credentials!

+
+

Warning

+

The HPC2N password and the SUPR password are separate! The HPC2N password and your university/department password are also separate!

+
+
+

Exercise: Change your password after first login

+

ONLY do this if you have just logged in for the first time / are still using the temporary password you got from the HPC2N password reset service!

+

Changing password is done using the passwd command:

+
passwd
+
+

Use a good password that combines letters of different case. Do not use dictionary words. Avoid using the same password that you also use in other places.

+

It will first ask for your current password. Type it in and press Enter. Then type in the new password, press Enter, and repeat it when asked. Your password has now been changed.

+
+

File transfers

+

We are not going to transfer any files as part of this course, but you may have to do so as part of your workflow when using Kebnekaise (or another HPC centre) for your research.

+

Linux, OS X

+

scp

+

SCP (Secure CoPy) is a simple way of transferring files between two machines that use the SSH (Secure SHell) protocol. You may use SCP to connect to any system where you have SSH (log-in) access.

+

These examples show how to use scp from the command line. Graphical programs also exist for doing scp transfers.

+

The command-line scp program should already be installed.
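
To check that it is available, you can for instance ask your shell where the scp binary is (a quick sanity check on Linux/macOS; it prints the path if scp is installed):

which scp
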

+
+

Remote to local

+

Transfer a file from Kebnekaise to your local system, while on your local system

+
scp username@kebnekaise.hpc2n.umu.se:file .
+
+
+
+

Local to remote

+

Transfer a local file to Kebnekaise, while on your local system

+
scp file username@kebnekaise.hpc2n.umu.se:file
+
+
+
+

Recursive directory copy from a local system to a remote system

+

Here, the directory sourcedirectory is copied as a subdirectory of somedir

+
scp -r sourcedirectory/ username@kebnekaise.hpc2n.umu.se:somedir/
+
+
+

sftp

+

SFTP (SSH File Transfer Protocol or sometimes called Secure File Transfer Protocol) is a network protocol that provides file transfer over a reliable data stream.

+

SFTP is a command-line program on most Unix, Linux, and Mac OS X systems. It is also available as a protocol choice in some graphical file transfer programs.

+
+

Example: From a local system to a remote system

+
enterprise-d [~]$ sftp user@kebnekaise.hpc2n.umu.se
+Connecting to kebnekaise.hpc2n.umu.se...
+user@kebnekaise.hpc2n.umu.se's password:
+sftp> put file.c C/file.c
+Uploading file.c to /home/u/user/C/file.c
+file.c                          100%    1    0.0KB/s   00:00
+sftp> put -P irf.png pic/
+Uploading irf.png to /home/u/user/pic/irf.png
+irf.png                         100% 2100    2.1KB/s   00:00
+sftp>
+
+
+

Windows

+

Here you need to download a client: WinSCP, FileZilla (sftp), PSCP/PSFTP, …

+

You can transfer with sftp or scp.

+

There is documentation in HPC2N’s documentation pages for Windows file transfers.

+

Editors

+

Since the editors on a Linux system are different from those you may be familiar with from Windows or macOS, here is a short overview.

+

There are command-line editors and graphical editors. If you are connecting with a regular SSH client, it will be simplest to use a command-line editor. If you are using ThinLinc, you can use command-line editors or graphical editors as you want.

+

Command-line

+

These are all good editors for using on the command line:

+
  • vi/vim
  • nano
  • emacs
+

They are all installed on Kebnekaise.

+

Of these, vi/vim as well as emacs are probably the most powerful, though the latter is better in a GUI environment. The easiest editor to use if you are not familiar with any of them is nano.

+
+

Nano

+
    +
  1. Starting “nano”: Type nano FILENAME on the command line and press Enter. FILENAME is whatever you want to call your file. (See the example right after this list.)
  2. +
  3. If FILENAME is a file that already exists, nano will open the file. If it does not exist, it will be created.
  4. +
  5. You now get an editor that looks like this: +nano editor
  6. +
  7. First thing to notice is that many of the commands are listed at the bottom.
  8. +
  9. The ^ before the letter-commands means you should press CTRL and then the letter (while keeping CTRL down).
  10. +
  11. Your prompt is in the editor window itself, and you can just type (or copy and paste) the content you want in your file.
  12. +
  13. When you want to exit (and possibly save), you press CTRL and then x while holding CTRL down (this is written CTRL-x or ^x). nano will ask you if you want to save the content of the buffer to the file. After that it will exit.
  14. +
+
+
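
For instance, to create (or open) a file called myfile.txt (just an example name) in the current directory, and later exit with CTRL-x:

nano myfile.txt
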

There is a manual for nano here.

+

GUI

+

If you are connecting with ThinLinc, you will be presented with a graphical user interface (GUI).

+

From there you can either

+
    +
  • open a terminal window/shell (Applications -> System Tools -> MATE Terminal)
  • +
  • or you can choose editors from the menu by going to Applications -> Accessories. This gives several editor options, of which these have a graphical interface: +
    • gedit (Text Editor), pluma, atom, emacs (GUI), nedit
  • +
+

If you are not familiar with any of these, a good recommendation would be to use Text Editor/gedit.

+
+

Text Editor/gedit

+
    +
  1. Starting “gedit”:
      +
    • From the menu, choose Applications -> Accessories -> Text Editor.
    • +
    +
  2. +
  3. You then get a window that looks like this: +gedit editor
  4. +
  5. You can open files by clicking “Open” in the top menu.
  6. +
  7. Clicking the small file icon with a green plus will create a new document.
  8. +
  9. Save by clicking “Save” in the menu.
  10. +
  11. The menu on the top right (the three horizontal lines) gives you several other options, including “Find” and “Find and Replace”.
  12. +
+
+
+

Keypoints

+
    +
  • You can log in with ThinLinc or another SSH client
  • +
  • ThinLinc is easiest if you need a GUI
  • +
  • There are several command-line editors: vi/vim, nano, emacs, …
  • +
  • And several GUI editors, which work best when using ThinLinc: gedit, pluma, atom, emacs (gui), nedit, …
  • +
+
+ +
+
+ +
+
+ +
+ +
+ +
+ + + + « Previous + + + Next » + + +
+ + + + + + + + + + + diff --git a/modules/index.html b/modules/index.html new file mode 100644 index 00000000..c06012f9 --- /dev/null +++ b/modules/index.html @@ -0,0 +1,475 @@ + + + + + + + + The Module System - Introduction to Kebnekaise + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +

The Module System (Lmod)

+
+

Objectives

+
    +
  • Learn the basics of the module system which is used to access most of the software on Kebnekaise
  • +
  • Try some of the most used commands for the module system:
      +
    • find/list software modules
    • +
    • load/unload software modules
    • +
    +
  • +
  • Learn about compiler toolchains
  • +
+
+

Most programs are accessed by first loading them as a ‘module’.

+

Modules are:

+
    +
  • used to set up your environment (paths to executables, libraries, etc.) for using a particular (set of) software package(s)
  • +
  • a tool to help users manage their Unix/Linux shell environment, allowing groups of related environment-variable settings to be made or removed dynamically
  • +
  • a way to have multiple versions of a program or package available, by just loading the proper module
  • +
  • installed in a hierarchical layout. This means that some modules are only available after loading a specific compiler and/or MPI version.
  • +
+

Useful commands (Lmod)

+
    +
  • See which modules exists:
      +
    • module spider or ml spider
    • +
    +
  • +
  • See which versions exist of a specific module:
      +
    • module spider MODULE or ml spider MODULE
    • +
    +
  • +
  • See prerequisites and how to load a specific version of a module:
      +
    • module spider MODULE/VERSION or ml spider MODULE/VERSION
    • +
    +
  • +
  • List which modules can be loaded right now (this depends on what is currently loaded):
      +
    • module avail or ml av
    • +
    +
  • +
  • See which modules are currently loaded:
      +
    • module list or ml
    • +
    +
  • +
  • Loading a module:
      +
    • module load MODULE or ml MODULE
    • +
    +
  • +
  • Loading a specific version of a module:
      +
    • module load MODULE/VERSION or ml MODULE/VERSION
    • +
    +
  • +
  • Unload a module:
      +
    • module unload MODULE or ml -MODULE
    • +
    +
  • +
  • Get more information about a module:
      +
    • ml show MODULE or module show MODULE
    • +
    +
  • +
  • Unload all modules except the ‘sticky’ modules:
      +
    • module purge or ml purge
    • +
    +
  • +
+
+

Hint

+

Code-along!

+
+
+Example: checking which versions exist of the module ‘Python’ +
b-an01 [~]$ ml spider Python
+
+---------------------------------------------------------------------------------------------------------
+  Python:
+---------------------------------------------------------------------------------------------------------
+    Description:
+      Python is a programming language that lets you work more quickly and integrate your systems more effectively.
+
+     Versions:
+        Python/2.7.15
+        Python/2.7.16
+        Python/2.7.18-bare
+        Python/2.7.18
+        Python/3.7.2
+        Python/3.7.4
+        Python/3.8.2
+        Python/3.8.6
+        Python/3.9.5-bare
+        Python/3.9.5
+        Python/3.9.6-bare
+        Python/3.9.6
+        Python/3.10.4-bare
+        Python/3.10.4
+        Python/3.10.8-bare
+        Python/3.10.8
+        Python/3.11.3
+        Python/3.11.5
+     Other possible modules matches:
+        Biopython  Boost.Python  Brotli-python  GitPython  IPython  Python-bundle-PyPI  flatbuffers-python  ...
+
+---------------------------------------------------------------------------------------------------------
+  To find other possible module matches execute:
+
+      $ module -r spider '.*Python.*'
+
+---------------------------------------------------------------------------------------------------------
+  For detailed information about a specific "Python" package (including how to load the modules) use the module's full name.
+  Note that names that have a trailing (E) are extensions provided by other modules.
+  For example:
+
+     $ module spider Python/3.11.5
+---------------------------------------------------------------------------------------------------------
+
+
+
+b-an01 [~]$ 
+
+
+
+Example: Check how to load a specific Python version (3.11.5 in this example) +
b-an01 [~]$ ml spider Python/3.11.5
+
+---------------------------------------------------------------------------------------------------------
+  Python: Python/3.11.5
+---------------------------------------------------------------------------------------------------------
+    Description:
+      Python is a programming language that lets you work more quickly and integrate your systems more effectively.
+
+    You will need to load all module(s) on any one of the lines below before the "Python/3.11.5" module is available to load.
+
+      GCCcore/13.2.0
+
+    This module provides the following extensions:
+
+       flit_core/3.9.0 (E), packaging/23.2 (E), pip/23.2.1 (E), setuptools-scm/8.0.4 (E), setuptools/68.2.2 (E), tomli/2.0.1 (E), typing_extensions/4.8.0 (E), wheel/0.41.2 (E)
+
+    Help:
+      Description
+      ===========
+      Python is a programming language that lets you work more quickly and integrate your systems more effectively.
+
+      More information
+      ================
+       - Homepage: https://python.org/
+
+
+      Included extensions
+      ===================
+      flit_core-3.9.0, packaging-23.2, pip-23.2.1, setuptools-68.2.2, setuptools-
+      scm-8.0.4, tomli-2.0.1, typing_extensions-4.8.0, wheel-0.41.2
+
+
+
+
+
+b-an01 [~]$ 
+
+
+
+Example: Load Python/3.11.5 and its prerequisite(s) +

Here we also show the loaded modules before and after the load. For illustration, we first use ml and then module list:

+
b-an01 [~]$ ml
+
+Currently Loaded Modules:
+  1) snicenvironment (S)   2) systemdefault (S)
+
+ Where:
+   S:  Module is Sticky, requires --force to unload or purge
+
+
+
+b-an01 [~]$ module load GCCcore/13.2.0 Python/3.11.5
+b-an01 [~]$ module list
+
+Currently Loaded Modules:
+  1) snicenvironment (S)   4) zlib/1.2.13     7) ncurses/6.4      10) SQLite/3.43.1  13) OpenSSL/1.1
+  2) systemdefault   (S)   5) binutils/2.40   8) libreadline/8.2  11) XZ/5.4.4       14) Python/3.11.5
+  3) GCCcore/13.2.0        6) bzip2/1.0.8     9) Tcl/8.6.13       12) libffi/3.4.4
+
+  Where:
+   S:  Module is Sticky, requires --force to unload or purge
+
+
+
+b-an01 [~]$ 
+
+
+
+Example: Unloading the module Python/3.11.5 +

In this example we unload the module Python/3.11.5, but not the prerequisite GCCcore/13.2.0. We also look at the output of module list before and after.

+
b-an01 [~]$ module list
+
+Currently Loaded Modules:
+  1) snicenvironment (S)   4) zlib/1.2.13     7) ncurses/6.4      10) SQLite/3.43.1  13) OpenSSL/1.1
+  2) systemdefault   (S)   5) binutils/2.40   8) libreadline/8.2  11) XZ/5.4.4       14) Python/3.11.5
+  3) GCCcore/13.2.0        6) bzip2/1.0.8     9) Tcl/8.6.13       12) libffi/3.4.4
+
+  Where:
+   S:  Module is Sticky, requires --force to unload or purge
+
+
+b-an01 [~]$ ml unload Python/3.11.5
+b-an01 [~]$ module list
+
+Currently Loaded Modules:
+  1) snicenvironment (S)   2) systemdefault (S)   3) GCCcore/13.2.0
+
+  Where:
+   S:  Module is Sticky, requires --force to unload or purge
+
+
+
+b-an01 [~]$ 
+
+

As you can see, the prerequisite did not get unloaded. This is on purpose, because you may have other things loaded which use the prerequisite.

+
+
+Example: unloading every module you have loaded with module purge, except the ‘sticky’ modules (some things needed for the environment) +

First we load some modules. Here Python 3.11.5, SciPy-bundle, and prerequisites for them. We also do module list after loading the modules and after using module purge.

+
b-an01 [~]$ ml GCC/13.2.0 
+b-an01 [~]$ ml Python/3.11.5 SciPy-bundle/2023.11 
+b-an01 [~]$ ml list
+
+Currently Loaded Modules:
+  1) snicenvironment (S)   7) bzip2/1.0.8      13) libffi/3.4.4     19) cffi/1.15.1
+  2) systemdefault   (S)   8) ncurses/6.4      14) OpenSSL/1.1      20) cryptography/41.0.5
+   3) GCCcore/13.2.0        9) libreadline/8.2  15) Python/3.11.5    21) virtualenv/20.24.6
+   4) zlib/1.2.13          10) Tcl/8.6.13       16) OpenBLAS/0.3.24  22) Python-bundle-PyPI/2023.10
+   5) binutils/2.40        11) SQLite/3.43.1    17) FlexiBLAS/3.3.1  23) pybind11/2.11.1
+   6) GCC/13.2.0           12) XZ/5.4.4         18) FFTW/3.3.10      24) SciPy-bundle/2023.11
+
+  Where:
+   S:  Module is Sticky, requires --force to unload or purge
+
+
+
+b-an01 [~]$ ml purge
+The following modules were not unloaded:
+  (Use "module --force purge" to unload all):
+
+  1) snicenvironment   2) systemdefault
+b-an01 [~]$ ml list
+
+Currently Loaded Modules:
+  1) snicenvironment (S)   2) systemdefault (S)
+
+  Where:
+   S:  Module is Sticky, requires --force to unload or purge
+
+
+
+b-an01 [~]$ 
+
+
+
+

Note

+
    +
  • You can do several module load on the same line, or you can do them one at a time, as you want (see the example after this note).
      +
    • The modules have to be loaded in order! You cannot list the prerequisite after the module that needs it!
    • +
    +
  • +
  • One advantage of loading modules one at a time is that you can then easily find compatible modules that depend on that version.
      +
    • Example: you have loaded GCC/13.2.0 and Python/3.11.5. You can now do ml av to see which versions of other modules you want to load, say SciPy-bundle, are compatible. If you know the name of the module you want, you can even start writing module load SciPy-bundle/ and press TAB - the system will then autocomplete to the compatible one(s).
    • +
    +
  • +
+
+
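
As an illustration (a sketch reusing the modules from the examples above; note that the prerequisite GCC/13.2.0 comes first), loading everything on one line:

b-an01 [~]$ ml GCC/13.2.0 Python/3.11.5 SciPy-bundle/2023.11

is equivalent to loading the modules one at a time:

b-an01 [~]$ ml GCC/13.2.0
b-an01 [~]$ ml Python/3.11.5
b-an01 [~]$ ml SciPy-bundle/2023.11
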

Compiler Toolchains

+

Compiler toolchains load bundles of software making up a complete environment for compiling/using specific prebuilt software. A toolchain includes some or all of: compiler suite, MPI, BLAS, LAPACK, ScaLAPACK, FFTW, CUDA.

+

Some currently available toolchains (check ml av for versions and a full, updated list; a short load example follows after the list):

+
    +
  • GCC: GCC only
  • +
  • gcccuda: GCC and CUDA
  • +
  • foss: GCC, OpenMPI, OpenBLAS/LAPACK, FFTW, ScaLAPACK
  • +
  • gompi: GCC, OpenMPI
  • +
  • gompic: GCC, OpenMPI, CUDA
  • +
  • gomkl: GCC, OpenMPI, MKL
  • +
  • iccifort: icc, ifort
  • +
  • iccifortcuda: icc, ifort, CUDA
  • +
  • iimpi: icc, ifort, IntelMPI
  • +
  • iimpic: iccifort, CUDA, impi
  • +
  • intel: icc, ifort, IntelMPI, IntelMKL
  • +
  • intel-compilers: icc, ifort (classic and oneAPI)
  • +
  • intelcuda: intel and CUDA
  • +
  • iompi: iccifort and OpenMPI
  • +
+
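
For example, to see which versions of the foss toolchain are installed and then load one of them (the version below is only an illustration; pick one that ml spider actually lists):

b-an01 [~]$ ml spider foss
b-an01 [~]$ ml foss/2023b
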
+

Keypoints

+
    +
  • The software on Kebnekaise is mostly accessed through the module system.
  • +
  • The modules are arranged in a hierarchical layout; many modules have prerequisites that need to be loaded first.
  • +
  • Important commands to the module system:
      +
    • Loading: module load MODULE
    • +
    • Unloading: module unload MODULE
    • +
    • Unload all modules: module purge
    • +
    • List all modules in the system: module spider
    • +
    • List versions available of a specific module: module spider MODULE
    • +
    • Show how to load a specific module and version: module spider MODULE/VERSION
    • +
    • List the modules you have currently loaded: module list
    • +
    +
  • +
  • Compiler toolchains are modules containing compiler suites and various libraries
  • +
+
+
+

More information

+ +
+ +
+
+ +
+
+ +
+ +
+ +
+ + + + « Previous + + + Next » + + +
+ + + + + + + + + + + diff --git a/projectsaccounts/index.html b/projectsaccounts/index.html new file mode 100644 index 00000000..0d5093ae --- /dev/null +++ b/projectsaccounts/index.html @@ -0,0 +1,225 @@ + + + + + + + + Projects and Accounts - Introduction to Kebnekaise + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +

Projects - compute and storage

+
+

Note

+

In order to have an account at HPC2N, you need to be a member of a compute project.

+

You can either join a project or apply for one yourself (if you fulfill the requirements).

+
+

There are both storage projects and compute projects. The storage projects are for when the amount of storage included with the compute project is not enough.

+
+

Kebnekaise is only open for local project requests!

+
    +
  • The PI must be affiliated with UmU, LTU, IRF, MiUN, or SLU.
  • +
  • You can still add members (join) from anywhere.
  • +
+
+

Application process

+

Apply for compute projects in SUPR.

+ +
+

Info

+
    +
  • By default, you have 25GB in your home directory.
  • +
  • If you need more, you/your PI can accept the “default storage” you will be offered after applying for compute resources.
  • +
  • The default storage is 500GB.
  • +
  • If you need more than that, you/your PI will have to apply for a storage project.
  • +
  • When you have both, link them together. It is done from the storage project.
  • +
  • This way all members of the compute project also become members of the storage project.
  • +
+
+

After you apply in SUPR, the project(s) will be reviewed.

+

Linking a compute project to a storage project

+
    +
  1. Before linking (SUPR):
  2. +
+

to-link +

+2. Pick a compute project to link:

+

choose +
+3. Showing linked projects:

+

linked +

+4. Members of the storage project after linking:

+

storage-members +

+

Accounts

+

When you have a project / have become a member of a project, you can apply for an account at HPC2N. This is done in SUPR, under “Accounts”: https://supr.naiss.se/account/.

+

Your account request will be processed within a week. You will then get an email with information about logging in and links to getting started information.

+

More information on the account process can be found on HPC2N’s documentation pages: https://www.hpc2n.umu.se/documentation/access-and-accounts/users

+ +
+
+ +
+
+ +
+ +
+ +
+ + + + « Previous + + + Next » + + +
+ + + + + + + + + + + diff --git a/search.html b/search.html new file mode 100644 index 00000000..dda9fcdd --- /dev/null +++ b/search.html @@ -0,0 +1,155 @@ + + + + + + + + Introduction to Kebnekaise + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
    +
  • +
  • +
  • +
+
+
+
+
+ + +

Search Results

+ + + +
+ Searching... +
+ + +
+
+ +
+
+ +
+ +
+ +
+ + + + + +
+ + + + + + + + + + + diff --git a/search/lunr.js b/search/lunr.js new file mode 100644 index 00000000..aca0a167 --- /dev/null +++ b/search/lunr.js @@ -0,0 +1,3475 @@ +/** + * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.3.9 + * Copyright (C) 2020 Oliver Nightingale + * @license MIT + */ + +;(function(){ + +/** + * A convenience function for configuring and constructing + * a new lunr Index. + * + * A lunr.Builder instance is created and the pipeline setup + * with a trimmer, stop word filter and stemmer. + * + * This builder object is yielded to the configuration function + * that is passed as a parameter, allowing the list of fields + * and other builder parameters to be customised. + * + * All documents _must_ be added within the passed config function. + * + * @example + * var idx = lunr(function () { + * this.field('title') + * this.field('body') + * this.ref('id') + * + * documents.forEach(function (doc) { + * this.add(doc) + * }, this) + * }) + * + * @see {@link lunr.Builder} + * @see {@link lunr.Pipeline} + * @see {@link lunr.trimmer} + * @see {@link lunr.stopWordFilter} + * @see {@link lunr.stemmer} + * @namespace {function} lunr + */ +var lunr = function (config) { + var builder = new lunr.Builder + + builder.pipeline.add( + lunr.trimmer, + lunr.stopWordFilter, + lunr.stemmer + ) + + builder.searchPipeline.add( + lunr.stemmer + ) + + config.call(builder, builder) + return builder.build() +} + +lunr.version = "2.3.9" +/*! + * lunr.utils + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * A namespace containing utils for the rest of the lunr library + * @namespace lunr.utils + */ +lunr.utils = {} + +/** + * Print a warning message to the console. + * + * @param {String} message The message to be printed. + * @memberOf lunr.utils + * @function + */ +lunr.utils.warn = (function (global) { + /* eslint-disable no-console */ + return function (message) { + if (global.console && console.warn) { + console.warn(message) + } + } + /* eslint-enable no-console */ +})(this) + +/** + * Convert an object to a string. + * + * In the case of `null` and `undefined` the function returns + * the empty string, in all other cases the result of calling + * `toString` on the passed object is returned. + * + * @param {Any} obj The object to convert to a string. + * @return {String} string representation of the passed object. + * @memberOf lunr.utils + */ +lunr.utils.asString = function (obj) { + if (obj === void 0 || obj === null) { + return "" + } else { + return obj.toString() + } +} + +/** + * Clones an object. + * + * Will create a copy of an existing object such that any mutations + * on the copy cannot affect the original. + * + * Only shallow objects are supported, passing a nested object to this + * function will cause a TypeError. + * + * Objects with primitives, and arrays of primitives are supported. + * + * @param {Object} obj The object to clone. + * @return {Object} a clone of the passed object. + * @throws {TypeError} when a nested object is passed. 
+ * @memberOf Utils + */ +lunr.utils.clone = function (obj) { + if (obj === null || obj === undefined) { + return obj + } + + var clone = Object.create(null), + keys = Object.keys(obj) + + for (var i = 0; i < keys.length; i++) { + var key = keys[i], + val = obj[key] + + if (Array.isArray(val)) { + clone[key] = val.slice() + continue + } + + if (typeof val === 'string' || + typeof val === 'number' || + typeof val === 'boolean') { + clone[key] = val + continue + } + + throw new TypeError("clone is not deep and does not support nested objects") + } + + return clone +} +lunr.FieldRef = function (docRef, fieldName, stringValue) { + this.docRef = docRef + this.fieldName = fieldName + this._stringValue = stringValue +} + +lunr.FieldRef.joiner = "/" + +lunr.FieldRef.fromString = function (s) { + var n = s.indexOf(lunr.FieldRef.joiner) + + if (n === -1) { + throw "malformed field ref string" + } + + var fieldRef = s.slice(0, n), + docRef = s.slice(n + 1) + + return new lunr.FieldRef (docRef, fieldRef, s) +} + +lunr.FieldRef.prototype.toString = function () { + if (this._stringValue == undefined) { + this._stringValue = this.fieldName + lunr.FieldRef.joiner + this.docRef + } + + return this._stringValue +} +/*! + * lunr.Set + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * A lunr set. + * + * @constructor + */ +lunr.Set = function (elements) { + this.elements = Object.create(null) + + if (elements) { + this.length = elements.length + + for (var i = 0; i < this.length; i++) { + this.elements[elements[i]] = true + } + } else { + this.length = 0 + } +} + +/** + * A complete set that contains all elements. + * + * @static + * @readonly + * @type {lunr.Set} + */ +lunr.Set.complete = { + intersect: function (other) { + return other + }, + + union: function () { + return this + }, + + contains: function () { + return true + } +} + +/** + * An empty set that contains no elements. + * + * @static + * @readonly + * @type {lunr.Set} + */ +lunr.Set.empty = { + intersect: function () { + return this + }, + + union: function (other) { + return other + }, + + contains: function () { + return false + } +} + +/** + * Returns true if this set contains the specified object. + * + * @param {object} object - Object whose presence in this set is to be tested. + * @returns {boolean} - True if this set contains the specified object. + */ +lunr.Set.prototype.contains = function (object) { + return !!this.elements[object] +} + +/** + * Returns a new set containing only the elements that are present in both + * this set and the specified set. + * + * @param {lunr.Set} other - set to intersect with this set. + * @returns {lunr.Set} a new set that is the intersection of this and the specified set. + */ + +lunr.Set.prototype.intersect = function (other) { + var a, b, elements, intersection = [] + + if (other === lunr.Set.complete) { + return this + } + + if (other === lunr.Set.empty) { + return other + } + + if (this.length < other.length) { + a = this + b = other + } else { + a = other + b = this + } + + elements = Object.keys(a.elements) + + for (var i = 0; i < elements.length; i++) { + var element = elements[i] + if (element in b.elements) { + intersection.push(element) + } + } + + return new lunr.Set (intersection) +} + +/** + * Returns a new set combining the elements of this and the specified set. + * + * @param {lunr.Set} other - set to union with this set. + * @return {lunr.Set} a new set that is the union of this and the specified set. 
+ */ + +lunr.Set.prototype.union = function (other) { + if (other === lunr.Set.complete) { + return lunr.Set.complete + } + + if (other === lunr.Set.empty) { + return this + } + + return new lunr.Set(Object.keys(this.elements).concat(Object.keys(other.elements))) +} +/** + * A function to calculate the inverse document frequency for + * a posting. This is shared between the builder and the index + * + * @private + * @param {object} posting - The posting for a given term + * @param {number} documentCount - The total number of documents. + */ +lunr.idf = function (posting, documentCount) { + var documentsWithTerm = 0 + + for (var fieldName in posting) { + if (fieldName == '_index') continue // Ignore the term index, its not a field + documentsWithTerm += Object.keys(posting[fieldName]).length + } + + var x = (documentCount - documentsWithTerm + 0.5) / (documentsWithTerm + 0.5) + + return Math.log(1 + Math.abs(x)) +} + +/** + * A token wraps a string representation of a token + * as it is passed through the text processing pipeline. + * + * @constructor + * @param {string} [str=''] - The string token being wrapped. + * @param {object} [metadata={}] - Metadata associated with this token. + */ +lunr.Token = function (str, metadata) { + this.str = str || "" + this.metadata = metadata || {} +} + +/** + * Returns the token string that is being wrapped by this object. + * + * @returns {string} + */ +lunr.Token.prototype.toString = function () { + return this.str +} + +/** + * A token update function is used when updating or optionally + * when cloning a token. + * + * @callback lunr.Token~updateFunction + * @param {string} str - The string representation of the token. + * @param {Object} metadata - All metadata associated with this token. + */ + +/** + * Applies the given function to the wrapped string token. + * + * @example + * token.update(function (str, metadata) { + * return str.toUpperCase() + * }) + * + * @param {lunr.Token~updateFunction} fn - A function to apply to the token string. + * @returns {lunr.Token} + */ +lunr.Token.prototype.update = function (fn) { + this.str = fn(this.str, this.metadata) + return this +} + +/** + * Creates a clone of this token. Optionally a function can be + * applied to the cloned token. + * + * @param {lunr.Token~updateFunction} [fn] - An optional function to apply to the cloned token. + * @returns {lunr.Token} + */ +lunr.Token.prototype.clone = function (fn) { + fn = fn || function (s) { return s } + return new lunr.Token (fn(this.str, this.metadata), this.metadata) +} +/*! + * lunr.tokenizer + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * A function for splitting a string into tokens ready to be inserted into + * the search index. Uses `lunr.tokenizer.separator` to split strings, change + * the value of this property to change how strings are split into tokens. + * + * This tokenizer will convert its parameter to a string by calling `toString` and + * then will split this string on the character in `lunr.tokenizer.separator`. + * Arrays will have their elements converted to strings and wrapped in a lunr.Token. + * + * Optional metadata can be passed to the tokenizer, this metadata will be cloned and + * added as metadata to every token that is created from the object to be tokenized. 
+ * + * @static + * @param {?(string|object|object[])} obj - The object to convert into tokens + * @param {?object} metadata - Optional metadata to associate with every token + * @returns {lunr.Token[]} + * @see {@link lunr.Pipeline} + */ +lunr.tokenizer = function (obj, metadata) { + if (obj == null || obj == undefined) { + return [] + } + + if (Array.isArray(obj)) { + return obj.map(function (t) { + return new lunr.Token( + lunr.utils.asString(t).toLowerCase(), + lunr.utils.clone(metadata) + ) + }) + } + + var str = obj.toString().toLowerCase(), + len = str.length, + tokens = [] + + for (var sliceEnd = 0, sliceStart = 0; sliceEnd <= len; sliceEnd++) { + var char = str.charAt(sliceEnd), + sliceLength = sliceEnd - sliceStart + + if ((char.match(lunr.tokenizer.separator) || sliceEnd == len)) { + + if (sliceLength > 0) { + var tokenMetadata = lunr.utils.clone(metadata) || {} + tokenMetadata["position"] = [sliceStart, sliceLength] + tokenMetadata["index"] = tokens.length + + tokens.push( + new lunr.Token ( + str.slice(sliceStart, sliceEnd), + tokenMetadata + ) + ) + } + + sliceStart = sliceEnd + 1 + } + + } + + return tokens +} + +/** + * The separator used to split a string into tokens. Override this property to change the behaviour of + * `lunr.tokenizer` behaviour when tokenizing strings. By default this splits on whitespace and hyphens. + * + * @static + * @see lunr.tokenizer + */ +lunr.tokenizer.separator = /[\s\-]+/ +/*! + * lunr.Pipeline + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * lunr.Pipelines maintain an ordered list of functions to be applied to all + * tokens in documents entering the search index and queries being ran against + * the index. + * + * An instance of lunr.Index created with the lunr shortcut will contain a + * pipeline with a stop word filter and an English language stemmer. Extra + * functions can be added before or after either of these functions or these + * default functions can be removed. + * + * When run the pipeline will call each function in turn, passing a token, the + * index of that token in the original list of all tokens and finally a list of + * all the original tokens. + * + * The output of functions in the pipeline will be passed to the next function + * in the pipeline. To exclude a token from entering the index the function + * should return undefined, the rest of the pipeline will not be called with + * this token. + * + * For serialisation of pipelines to work, all functions used in an instance of + * a pipeline should be registered with lunr.Pipeline. Registered functions can + * then be loaded. If trying to load a serialised pipeline that uses functions + * that are not registered an error will be thrown. + * + * If not planning on serialising the pipeline then registering pipeline functions + * is not necessary. + * + * @constructor + */ +lunr.Pipeline = function () { + this._stack = [] +} + +lunr.Pipeline.registeredFunctions = Object.create(null) + +/** + * A pipeline function maps lunr.Token to lunr.Token. A lunr.Token contains the token + * string as well as all known metadata. A pipeline function can mutate the token string + * or mutate (or add) metadata for a given token. + * + * A pipeline function can indicate that the passed token should be discarded by returning + * null, undefined or an empty string. This token will not be passed to any downstream pipeline + * functions and will not be added to the index. + * + * Multiple tokens can be returned by returning an array of tokens. 
Each token will be passed + * to any downstream pipeline functions and all will returned tokens will be added to the index. + * + * Any number of pipeline functions may be chained together using a lunr.Pipeline. + * + * @interface lunr.PipelineFunction + * @param {lunr.Token} token - A token from the document being processed. + * @param {number} i - The index of this token in the complete list of tokens for this document/field. + * @param {lunr.Token[]} tokens - All tokens for this document/field. + * @returns {(?lunr.Token|lunr.Token[])} + */ + +/** + * Register a function with the pipeline. + * + * Functions that are used in the pipeline should be registered if the pipeline + * needs to be serialised, or a serialised pipeline needs to be loaded. + * + * Registering a function does not add it to a pipeline, functions must still be + * added to instances of the pipeline for them to be used when running a pipeline. + * + * @param {lunr.PipelineFunction} fn - The function to check for. + * @param {String} label - The label to register this function with + */ +lunr.Pipeline.registerFunction = function (fn, label) { + if (label in this.registeredFunctions) { + lunr.utils.warn('Overwriting existing registered function: ' + label) + } + + fn.label = label + lunr.Pipeline.registeredFunctions[fn.label] = fn +} + +/** + * Warns if the function is not registered as a Pipeline function. + * + * @param {lunr.PipelineFunction} fn - The function to check for. + * @private + */ +lunr.Pipeline.warnIfFunctionNotRegistered = function (fn) { + var isRegistered = fn.label && (fn.label in this.registeredFunctions) + + if (!isRegistered) { + lunr.utils.warn('Function is not registered with pipeline. This may cause problems when serialising the index.\n', fn) + } +} + +/** + * Loads a previously serialised pipeline. + * + * All functions to be loaded must already be registered with lunr.Pipeline. + * If any function from the serialised data has not been registered then an + * error will be thrown. + * + * @param {Object} serialised - The serialised pipeline to load. + * @returns {lunr.Pipeline} + */ +lunr.Pipeline.load = function (serialised) { + var pipeline = new lunr.Pipeline + + serialised.forEach(function (fnName) { + var fn = lunr.Pipeline.registeredFunctions[fnName] + + if (fn) { + pipeline.add(fn) + } else { + throw new Error('Cannot load unregistered function: ' + fnName) + } + }) + + return pipeline +} + +/** + * Adds new functions to the end of the pipeline. + * + * Logs a warning if the function has not been registered. + * + * @param {lunr.PipelineFunction[]} functions - Any number of functions to add to the pipeline. + */ +lunr.Pipeline.prototype.add = function () { + var fns = Array.prototype.slice.call(arguments) + + fns.forEach(function (fn) { + lunr.Pipeline.warnIfFunctionNotRegistered(fn) + this._stack.push(fn) + }, this) +} + +/** + * Adds a single function after a function that already exists in the + * pipeline. + * + * Logs a warning if the function has not been registered. + * + * @param {lunr.PipelineFunction} existingFn - A function that already exists in the pipeline. + * @param {lunr.PipelineFunction} newFn - The new function to add to the pipeline. 
+ */ +lunr.Pipeline.prototype.after = function (existingFn, newFn) { + lunr.Pipeline.warnIfFunctionNotRegistered(newFn) + + var pos = this._stack.indexOf(existingFn) + if (pos == -1) { + throw new Error('Cannot find existingFn') + } + + pos = pos + 1 + this._stack.splice(pos, 0, newFn) +} + +/** + * Adds a single function before a function that already exists in the + * pipeline. + * + * Logs a warning if the function has not been registered. + * + * @param {lunr.PipelineFunction} existingFn - A function that already exists in the pipeline. + * @param {lunr.PipelineFunction} newFn - The new function to add to the pipeline. + */ +lunr.Pipeline.prototype.before = function (existingFn, newFn) { + lunr.Pipeline.warnIfFunctionNotRegistered(newFn) + + var pos = this._stack.indexOf(existingFn) + if (pos == -1) { + throw new Error('Cannot find existingFn') + } + + this._stack.splice(pos, 0, newFn) +} + +/** + * Removes a function from the pipeline. + * + * @param {lunr.PipelineFunction} fn The function to remove from the pipeline. + */ +lunr.Pipeline.prototype.remove = function (fn) { + var pos = this._stack.indexOf(fn) + if (pos == -1) { + return + } + + this._stack.splice(pos, 1) +} + +/** + * Runs the current list of functions that make up the pipeline against the + * passed tokens. + * + * @param {Array} tokens The tokens to run through the pipeline. + * @returns {Array} + */ +lunr.Pipeline.prototype.run = function (tokens) { + var stackLength = this._stack.length + + for (var i = 0; i < stackLength; i++) { + var fn = this._stack[i] + var memo = [] + + for (var j = 0; j < tokens.length; j++) { + var result = fn(tokens[j], j, tokens) + + if (result === null || result === void 0 || result === '') continue + + if (Array.isArray(result)) { + for (var k = 0; k < result.length; k++) { + memo.push(result[k]) + } + } else { + memo.push(result) + } + } + + tokens = memo + } + + return tokens +} + +/** + * Convenience method for passing a string through a pipeline and getting + * strings out. This method takes care of wrapping the passed string in a + * token and mapping the resulting tokens back to strings. + * + * @param {string} str - The string to pass through the pipeline. + * @param {?object} metadata - Optional metadata to associate with the token + * passed to the pipeline. + * @returns {string[]} + */ +lunr.Pipeline.prototype.runString = function (str, metadata) { + var token = new lunr.Token (str, metadata) + + return this.run([token]).map(function (t) { + return t.toString() + }) +} + +/** + * Resets the pipeline by removing any existing processors. + * + */ +lunr.Pipeline.prototype.reset = function () { + this._stack = [] +} + +/** + * Returns a representation of the pipeline ready for serialisation. + * + * Logs a warning if the function has not been registered. + * + * @returns {Array} + */ +lunr.Pipeline.prototype.toJSON = function () { + return this._stack.map(function (fn) { + lunr.Pipeline.warnIfFunctionNotRegistered(fn) + + return fn.label + }) +} +/*! + * lunr.Vector + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * A vector is used to construct the vector space of documents and queries. These + * vectors support operations to determine the similarity between two documents or + * a document and a query. + * + * Normally no parameters are required for initializing a vector, but in the case of + * loading a previously dumped vector the raw elements can be provided to the constructor. 
+ * + * For performance reasons vectors are implemented with a flat array, where an elements + * index is immediately followed by its value. E.g. [index, value, index, value]. This + * allows the underlying array to be as sparse as possible and still offer decent + * performance when being used for vector calculations. + * + * @constructor + * @param {Number[]} [elements] - The flat list of element index and element value pairs. + */ +lunr.Vector = function (elements) { + this._magnitude = 0 + this.elements = elements || [] +} + + +/** + * Calculates the position within the vector to insert a given index. + * + * This is used internally by insert and upsert. If there are duplicate indexes then + * the position is returned as if the value for that index were to be updated, but it + * is the callers responsibility to check whether there is a duplicate at that index + * + * @param {Number} insertIdx - The index at which the element should be inserted. + * @returns {Number} + */ +lunr.Vector.prototype.positionForIndex = function (index) { + // For an empty vector the tuple can be inserted at the beginning + if (this.elements.length == 0) { + return 0 + } + + var start = 0, + end = this.elements.length / 2, + sliceLength = end - start, + pivotPoint = Math.floor(sliceLength / 2), + pivotIndex = this.elements[pivotPoint * 2] + + while (sliceLength > 1) { + if (pivotIndex < index) { + start = pivotPoint + } + + if (pivotIndex > index) { + end = pivotPoint + } + + if (pivotIndex == index) { + break + } + + sliceLength = end - start + pivotPoint = start + Math.floor(sliceLength / 2) + pivotIndex = this.elements[pivotPoint * 2] + } + + if (pivotIndex == index) { + return pivotPoint * 2 + } + + if (pivotIndex > index) { + return pivotPoint * 2 + } + + if (pivotIndex < index) { + return (pivotPoint + 1) * 2 + } +} + +/** + * Inserts an element at an index within the vector. + * + * Does not allow duplicates, will throw an error if there is already an entry + * for this index. + * + * @param {Number} insertIdx - The index at which the element should be inserted. + * @param {Number} val - The value to be inserted into the vector. + */ +lunr.Vector.prototype.insert = function (insertIdx, val) { + this.upsert(insertIdx, val, function () { + throw "duplicate index" + }) +} + +/** + * Inserts or updates an existing index within the vector. + * + * @param {Number} insertIdx - The index at which the element should be inserted. + * @param {Number} val - The value to be inserted into the vector. + * @param {function} fn - A function that is called for updates, the existing value and the + * requested value are passed as arguments + */ +lunr.Vector.prototype.upsert = function (insertIdx, val, fn) { + this._magnitude = 0 + var position = this.positionForIndex(insertIdx) + + if (this.elements[position] == insertIdx) { + this.elements[position + 1] = fn(this.elements[position + 1], val) + } else { + this.elements.splice(position, 0, insertIdx, val) + } +} + +/** + * Calculates the magnitude of this vector. + * + * @returns {Number} + */ +lunr.Vector.prototype.magnitude = function () { + if (this._magnitude) return this._magnitude + + var sumOfSquares = 0, + elementsLength = this.elements.length + + for (var i = 1; i < elementsLength; i += 2) { + var val = this.elements[i] + sumOfSquares += val * val + } + + return this._magnitude = Math.sqrt(sumOfSquares) +} + +/** + * Calculates the dot product of this vector and another vector. + * + * @param {lunr.Vector} otherVector - The vector to compute the dot product with. 
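+ *
+ * A small sketch with made-up values, showing how the flat
+ * [index, value, index, value] layout behaves:
+ *
+ * @example Dot product of two sparse vectors
+ *   var a = new lunr.Vector([0, 1, 2, 3]) // index 0 -> 1, index 2 -> 3
+ *   var b = new lunr.Vector([0, 2, 2, 1]) // index 0 -> 2, index 2 -> 1
+ *   a.dot(b) // => 5, i.e. 1 * 2 + 3 * 1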
+ * @returns {Number} + */ +lunr.Vector.prototype.dot = function (otherVector) { + var dotProduct = 0, + a = this.elements, b = otherVector.elements, + aLen = a.length, bLen = b.length, + aVal = 0, bVal = 0, + i = 0, j = 0 + + while (i < aLen && j < bLen) { + aVal = a[i], bVal = b[j] + if (aVal < bVal) { + i += 2 + } else if (aVal > bVal) { + j += 2 + } else if (aVal == bVal) { + dotProduct += a[i + 1] * b[j + 1] + i += 2 + j += 2 + } + } + + return dotProduct +} + +/** + * Calculates the similarity between this vector and another vector. + * + * @param {lunr.Vector} otherVector - The other vector to calculate the + * similarity with. + * @returns {Number} + */ +lunr.Vector.prototype.similarity = function (otherVector) { + return this.dot(otherVector) / this.magnitude() || 0 +} + +/** + * Converts the vector to an array of the elements within the vector. + * + * @returns {Number[]} + */ +lunr.Vector.prototype.toArray = function () { + var output = new Array (this.elements.length / 2) + + for (var i = 1, j = 0; i < this.elements.length; i += 2, j++) { + output[j] = this.elements[i] + } + + return output +} + +/** + * A JSON serializable representation of the vector. + * + * @returns {Number[]} + */ +lunr.Vector.prototype.toJSON = function () { + return this.elements +} +/* eslint-disable */ +/*! + * lunr.stemmer + * Copyright (C) 2020 Oliver Nightingale + * Includes code from - http://tartarus.org/~martin/PorterStemmer/js.txt + */ + +/** + * lunr.stemmer is an english language stemmer, this is a JavaScript + * implementation of the PorterStemmer taken from http://tartarus.org/~martin + * + * @static + * @implements {lunr.PipelineFunction} + * @param {lunr.Token} token - The string to stem + * @returns {lunr.Token} + * @see {@link lunr.Pipeline} + * @function + */ +lunr.stemmer = (function(){ + var step2list = { + "ational" : "ate", + "tional" : "tion", + "enci" : "ence", + "anci" : "ance", + "izer" : "ize", + "bli" : "ble", + "alli" : "al", + "entli" : "ent", + "eli" : "e", + "ousli" : "ous", + "ization" : "ize", + "ation" : "ate", + "ator" : "ate", + "alism" : "al", + "iveness" : "ive", + "fulness" : "ful", + "ousness" : "ous", + "aliti" : "al", + "iviti" : "ive", + "biliti" : "ble", + "logi" : "log" + }, + + step3list = { + "icate" : "ic", + "ative" : "", + "alize" : "al", + "iciti" : "ic", + "ical" : "ic", + "ful" : "", + "ness" : "" + }, + + c = "[^aeiou]", // consonant + v = "[aeiouy]", // vowel + C = c + "[^aeiouy]*", // consonant sequence + V = v + "[aeiou]*", // vowel sequence + + mgr0 = "^(" + C + ")?" + V + C, // [C]VC... is m>0 + meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$", // [C]VC[V] is m=1 + mgr1 = "^(" + C + ")?" + V + C + V + C, // [C]VCVC... is m>1 + s_v = "^(" + C + ")?" 
+ v; // vowel in stem + + var re_mgr0 = new RegExp(mgr0); + var re_mgr1 = new RegExp(mgr1); + var re_meq1 = new RegExp(meq1); + var re_s_v = new RegExp(s_v); + + var re_1a = /^(.+?)(ss|i)es$/; + var re2_1a = /^(.+?)([^s])s$/; + var re_1b = /^(.+?)eed$/; + var re2_1b = /^(.+?)(ed|ing)$/; + var re_1b_2 = /.$/; + var re2_1b_2 = /(at|bl|iz)$/; + var re3_1b_2 = new RegExp("([^aeiouylsz])\\1$"); + var re4_1b_2 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + + var re_1c = /^(.+?[^aeiou])y$/; + var re_2 = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + + var re_3 = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + + var re_4 = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + var re2_4 = /^(.+?)(s|t)(ion)$/; + + var re_5 = /^(.+?)e$/; + var re_5_1 = /ll$/; + var re3_5 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + + var porterStemmer = function porterStemmer(w) { + var stem, + suffix, + firstch, + re, + re2, + re3, + re4; + + if (w.length < 3) { return w; } + + firstch = w.substr(0,1); + if (firstch == "y") { + w = firstch.toUpperCase() + w.substr(1); + } + + // Step 1a + re = re_1a + re2 = re2_1a; + + if (re.test(w)) { w = w.replace(re,"$1$2"); } + else if (re2.test(w)) { w = w.replace(re2,"$1$2"); } + + // Step 1b + re = re_1b; + re2 = re2_1b; + if (re.test(w)) { + var fp = re.exec(w); + re = re_mgr0; + if (re.test(fp[1])) { + re = re_1b_2; + w = w.replace(re,""); + } + } else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = re_s_v; + if (re2.test(stem)) { + w = stem; + re2 = re2_1b_2; + re3 = re3_1b_2; + re4 = re4_1b_2; + if (re2.test(w)) { w = w + "e"; } + else if (re3.test(w)) { re = re_1b_2; w = w.replace(re,""); } + else if (re4.test(w)) { w = w + "e"; } + } + } + + // Step 1c - replace suffix y or Y by i if preceded by a non-vowel which is not the first letter of the word (so cry -> cri, by -> by, say -> say) + re = re_1c; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + w = stem + "i"; + } + + // Step 2 + re = re_2; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = re_mgr0; + if (re.test(stem)) { + w = stem + step2list[suffix]; + } + } + + // Step 3 + re = re_3; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = re_mgr0; + if (re.test(stem)) { + w = stem + step3list[suffix]; + } + } + + // Step 4 + re = re_4; + re2 = re2_4; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = re_mgr1; + if (re.test(stem)) { + w = stem; + } + } else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = re_mgr1; + if (re2.test(stem)) { + w = stem; + } + } + + // Step 5 + re = re_5; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = re_mgr1; + re2 = re_meq1; + re3 = re3_5; + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) { + w = stem; + } + } + + re = re_5_1; + re2 = re_mgr1; + if (re.test(w) && re2.test(w)) { + re = re_1b_2; + w = w.replace(re,""); + } + + // and turn initial Y back to y + + if (firstch == "y") { + w = firstch.toLowerCase() + w.substr(1); + } + + return w; + }; + + return function (token) { + return token.update(porterStemmer); + } +})(); + +lunr.Pipeline.registerFunction(lunr.stemmer, 'stemmer') +/*! + * lunr.stopWordFilter + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * lunr.generateStopWordFilter builds a stopWordFilter function from the provided + * list of stop words. 
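+ * For example, `lunr.generateStopWordFilter(['och', 'att', 'eller'])` (an
+ * illustrative, deliberately tiny word list) returns a pipeline function that
+ * drops exactly those three tokens and passes every other token through.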
+ * + * The built in lunr.stopWordFilter is built using this generator and can be used + * to generate custom stopWordFilters for applications or non English languages. + * + * @function + * @param {Array} token The token to pass through the filter + * @returns {lunr.PipelineFunction} + * @see lunr.Pipeline + * @see lunr.stopWordFilter + */ +lunr.generateStopWordFilter = function (stopWords) { + var words = stopWords.reduce(function (memo, stopWord) { + memo[stopWord] = stopWord + return memo + }, {}) + + return function (token) { + if (token && words[token.toString()] !== token.toString()) return token + } +} + +/** + * lunr.stopWordFilter is an English language stop word list filter, any words + * contained in the list will not be passed through the filter. + * + * This is intended to be used in the Pipeline. If the token does not pass the + * filter then undefined will be returned. + * + * @function + * @implements {lunr.PipelineFunction} + * @params {lunr.Token} token - A token to check for being a stop word. + * @returns {lunr.Token} + * @see {@link lunr.Pipeline} + */ +lunr.stopWordFilter = lunr.generateStopWordFilter([ + 'a', + 'able', + 'about', + 'across', + 'after', + 'all', + 'almost', + 'also', + 'am', + 'among', + 'an', + 'and', + 'any', + 'are', + 'as', + 'at', + 'be', + 'because', + 'been', + 'but', + 'by', + 'can', + 'cannot', + 'could', + 'dear', + 'did', + 'do', + 'does', + 'either', + 'else', + 'ever', + 'every', + 'for', + 'from', + 'get', + 'got', + 'had', + 'has', + 'have', + 'he', + 'her', + 'hers', + 'him', + 'his', + 'how', + 'however', + 'i', + 'if', + 'in', + 'into', + 'is', + 'it', + 'its', + 'just', + 'least', + 'let', + 'like', + 'likely', + 'may', + 'me', + 'might', + 'most', + 'must', + 'my', + 'neither', + 'no', + 'nor', + 'not', + 'of', + 'off', + 'often', + 'on', + 'only', + 'or', + 'other', + 'our', + 'own', + 'rather', + 'said', + 'say', + 'says', + 'she', + 'should', + 'since', + 'so', + 'some', + 'than', + 'that', + 'the', + 'their', + 'them', + 'then', + 'there', + 'these', + 'they', + 'this', + 'tis', + 'to', + 'too', + 'twas', + 'us', + 'wants', + 'was', + 'we', + 'were', + 'what', + 'when', + 'where', + 'which', + 'while', + 'who', + 'whom', + 'why', + 'will', + 'with', + 'would', + 'yet', + 'you', + 'your' +]) + +lunr.Pipeline.registerFunction(lunr.stopWordFilter, 'stopWordFilter') +/*! + * lunr.trimmer + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * lunr.trimmer is a pipeline function for trimming non word + * characters from the beginning and end of tokens before they + * enter the index. + * + * This implementation may not work correctly for non latin + * characters and should either be removed or adapted for use + * with languages with non-latin characters. + * + * @static + * @implements {lunr.PipelineFunction} + * @param {lunr.Token} token The token to pass through the filter + * @returns {lunr.Token} + * @see lunr.Pipeline + */ +lunr.trimmer = function (token) { + return token.update(function (s) { + return s.replace(/^\W+/, '').replace(/\W+$/, '') + }) +} + +lunr.Pipeline.registerFunction(lunr.trimmer, 'trimmer') +/*! + * lunr.TokenSet + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * A token set is used to store the unique list of all tokens + * within an index. Token sets are also used to represent an + * incoming query to the index, this query token set and index + * token set are then intersected to find which tokens to look + * up in the inverted index. 
+ * + * A token set can hold multiple tokens, as in the case of the + * index token set, or it can hold a single token as in the + * case of a simple query token set. + * + * Additionally token sets are used to perform wildcard matching. + * Leading, contained and trailing wildcards are supported, and + * from this edit distance matching can also be provided. + * + * Token sets are implemented as a minimal finite state automata, + * where both common prefixes and suffixes are shared between tokens. + * This helps to reduce the space used for storing the token set. + * + * @constructor + */ +lunr.TokenSet = function () { + this.final = false + this.edges = {} + this.id = lunr.TokenSet._nextId + lunr.TokenSet._nextId += 1 +} + +/** + * Keeps track of the next, auto increment, identifier to assign + * to a new tokenSet. + * + * TokenSets require a unique identifier to be correctly minimised. + * + * @private + */ +lunr.TokenSet._nextId = 1 + +/** + * Creates a TokenSet instance from the given sorted array of words. + * + * @param {String[]} arr - A sorted array of strings to create the set from. + * @returns {lunr.TokenSet} + * @throws Will throw an error if the input array is not sorted. + */ +lunr.TokenSet.fromArray = function (arr) { + var builder = new lunr.TokenSet.Builder + + for (var i = 0, len = arr.length; i < len; i++) { + builder.insert(arr[i]) + } + + builder.finish() + return builder.root +} + +/** + * Creates a token set from a query clause. + * + * @private + * @param {Object} clause - A single clause from lunr.Query. + * @param {string} clause.term - The query clause term. + * @param {number} [clause.editDistance] - The optional edit distance for the term. + * @returns {lunr.TokenSet} + */ +lunr.TokenSet.fromClause = function (clause) { + if ('editDistance' in clause) { + return lunr.TokenSet.fromFuzzyString(clause.term, clause.editDistance) + } else { + return lunr.TokenSet.fromString(clause.term) + } +} + +/** + * Creates a token set representing a single string with a specified + * edit distance. + * + * Insertions, deletions, substitutions and transpositions are each + * treated as an edit distance of 1. + * + * Increasing the allowed edit distance will have a dramatic impact + * on the performance of both creating and intersecting these TokenSets. + * It is advised to keep the edit distance less than 3. + * + * @param {string} str - The string to create the token set from. + * @param {number} editDistance - The allowed edit distance to match. 
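+ *
+ * A brief sketch (the term is illustrative):
+ *
+ * @example Building a fuzzy token set for 'hello' within one edit
+ *   var fuzzy = lunr.TokenSet.fromFuzzyString('hello', 1)
+ *   // intersecting this with an index token set will match terms such as
+ *   // 'hello', 'hell' or 'hella'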
+ * @returns {lunr.Vector} + */ +lunr.TokenSet.fromFuzzyString = function (str, editDistance) { + var root = new lunr.TokenSet + + var stack = [{ + node: root, + editsRemaining: editDistance, + str: str + }] + + while (stack.length) { + var frame = stack.pop() + + // no edit + if (frame.str.length > 0) { + var char = frame.str.charAt(0), + noEditNode + + if (char in frame.node.edges) { + noEditNode = frame.node.edges[char] + } else { + noEditNode = new lunr.TokenSet + frame.node.edges[char] = noEditNode + } + + if (frame.str.length == 1) { + noEditNode.final = true + } + + stack.push({ + node: noEditNode, + editsRemaining: frame.editsRemaining, + str: frame.str.slice(1) + }) + } + + if (frame.editsRemaining == 0) { + continue + } + + // insertion + if ("*" in frame.node.edges) { + var insertionNode = frame.node.edges["*"] + } else { + var insertionNode = new lunr.TokenSet + frame.node.edges["*"] = insertionNode + } + + if (frame.str.length == 0) { + insertionNode.final = true + } + + stack.push({ + node: insertionNode, + editsRemaining: frame.editsRemaining - 1, + str: frame.str + }) + + // deletion + // can only do a deletion if we have enough edits remaining + // and if there are characters left to delete in the string + if (frame.str.length > 1) { + stack.push({ + node: frame.node, + editsRemaining: frame.editsRemaining - 1, + str: frame.str.slice(1) + }) + } + + // deletion + // just removing the last character from the str + if (frame.str.length == 1) { + frame.node.final = true + } + + // substitution + // can only do a substitution if we have enough edits remaining + // and if there are characters left to substitute + if (frame.str.length >= 1) { + if ("*" in frame.node.edges) { + var substitutionNode = frame.node.edges["*"] + } else { + var substitutionNode = new lunr.TokenSet + frame.node.edges["*"] = substitutionNode + } + + if (frame.str.length == 1) { + substitutionNode.final = true + } + + stack.push({ + node: substitutionNode, + editsRemaining: frame.editsRemaining - 1, + str: frame.str.slice(1) + }) + } + + // transposition + // can only do a transposition if there are edits remaining + // and there are enough characters to transpose + if (frame.str.length > 1) { + var charA = frame.str.charAt(0), + charB = frame.str.charAt(1), + transposeNode + + if (charB in frame.node.edges) { + transposeNode = frame.node.edges[charB] + } else { + transposeNode = new lunr.TokenSet + frame.node.edges[charB] = transposeNode + } + + if (frame.str.length == 1) { + transposeNode.final = true + } + + stack.push({ + node: transposeNode, + editsRemaining: frame.editsRemaining - 1, + str: charA + frame.str.slice(2) + }) + } + } + + return root +} + +/** + * Creates a TokenSet from a string. + * + * The string may contain one or more wildcard characters (*) + * that will allow wildcard matching when intersecting with + * another TokenSet. + * + * @param {string} str - The string to create a TokenSet from. + * @returns {lunr.TokenSet} + */ +lunr.TokenSet.fromString = function (str) { + var node = new lunr.TokenSet, + root = node + + /* + * Iterates through all characters within the passed string + * appending a node for each character. + * + * When a wildcard character is found then a self + * referencing edge is introduced to continually match + * any number of any characters. 
+ */ + for (var i = 0, len = str.length; i < len; i++) { + var char = str[i], + final = (i == len - 1) + + if (char == "*") { + node.edges[char] = node + node.final = final + + } else { + var next = new lunr.TokenSet + next.final = final + + node.edges[char] = next + node = next + } + } + + return root +} + +/** + * Converts this TokenSet into an array of strings + * contained within the TokenSet. + * + * This is not intended to be used on a TokenSet that + * contains wildcards, in these cases the results are + * undefined and are likely to cause an infinite loop. + * + * @returns {string[]} + */ +lunr.TokenSet.prototype.toArray = function () { + var words = [] + + var stack = [{ + prefix: "", + node: this + }] + + while (stack.length) { + var frame = stack.pop(), + edges = Object.keys(frame.node.edges), + len = edges.length + + if (frame.node.final) { + /* In Safari, at this point the prefix is sometimes corrupted, see: + * https://github.com/olivernn/lunr.js/issues/279 Calling any + * String.prototype method forces Safari to "cast" this string to what + * it's supposed to be, fixing the bug. */ + frame.prefix.charAt(0) + words.push(frame.prefix) + } + + for (var i = 0; i < len; i++) { + var edge = edges[i] + + stack.push({ + prefix: frame.prefix.concat(edge), + node: frame.node.edges[edge] + }) + } + } + + return words +} + +/** + * Generates a string representation of a TokenSet. + * + * This is intended to allow TokenSets to be used as keys + * in objects, largely to aid the construction and minimisation + * of a TokenSet. As such it is not designed to be a human + * friendly representation of the TokenSet. + * + * @returns {string} + */ +lunr.TokenSet.prototype.toString = function () { + // NOTE: Using Object.keys here as this.edges is very likely + // to enter 'hash-mode' with many keys being added + // + // avoiding a for-in loop here as it leads to the function + // being de-optimised (at least in V8). From some simple + // benchmarks the performance is comparable, but allowing + // V8 to optimize may mean easy performance wins in the future. + + if (this._str) { + return this._str + } + + var str = this.final ? '1' : '0', + labels = Object.keys(this.edges).sort(), + len = labels.length + + for (var i = 0; i < len; i++) { + var label = labels[i], + node = this.edges[label] + + str = str + label + node.id + } + + return str +} + +/** + * Returns a new TokenSet that is the intersection of + * this TokenSet and the passed TokenSet. + * + * This intersection will take into account any wildcards + * contained within the TokenSet. + * + * @param {lunr.TokenSet} b - An other TokenSet to intersect with. 
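+ *
+ * A short sketch with illustrative words:
+ *
+ * @example Intersecting an index token set with a wildcard query
+ *   var index = lunr.TokenSet.fromArray(['car', 'cart', 'cat'])
+ *   var query = lunr.TokenSet.fromString('ca*')
+ *   index.intersect(query).toArray() // => 'car', 'cart' and 'cat', in some order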
+ * @returns {lunr.TokenSet} + */ +lunr.TokenSet.prototype.intersect = function (b) { + var output = new lunr.TokenSet, + frame = undefined + + var stack = [{ + qNode: b, + output: output, + node: this + }] + + while (stack.length) { + frame = stack.pop() + + // NOTE: As with the #toString method, we are using + // Object.keys and a for loop instead of a for-in loop + // as both of these objects enter 'hash' mode, causing + // the function to be de-optimised in V8 + var qEdges = Object.keys(frame.qNode.edges), + qLen = qEdges.length, + nEdges = Object.keys(frame.node.edges), + nLen = nEdges.length + + for (var q = 0; q < qLen; q++) { + var qEdge = qEdges[q] + + for (var n = 0; n < nLen; n++) { + var nEdge = nEdges[n] + + if (nEdge == qEdge || qEdge == '*') { + var node = frame.node.edges[nEdge], + qNode = frame.qNode.edges[qEdge], + final = node.final && qNode.final, + next = undefined + + if (nEdge in frame.output.edges) { + // an edge already exists for this character + // no need to create a new node, just set the finality + // bit unless this node is already final + next = frame.output.edges[nEdge] + next.final = next.final || final + + } else { + // no edge exists yet, must create one + // set the finality bit and insert it + // into the output + next = new lunr.TokenSet + next.final = final + frame.output.edges[nEdge] = next + } + + stack.push({ + qNode: qNode, + output: next, + node: node + }) + } + } + } + } + + return output +} +lunr.TokenSet.Builder = function () { + this.previousWord = "" + this.root = new lunr.TokenSet + this.uncheckedNodes = [] + this.minimizedNodes = {} +} + +lunr.TokenSet.Builder.prototype.insert = function (word) { + var node, + commonPrefix = 0 + + if (word < this.previousWord) { + throw new Error ("Out of order word insertion") + } + + for (var i = 0; i < word.length && i < this.previousWord.length; i++) { + if (word[i] != this.previousWord[i]) break + commonPrefix++ + } + + this.minimize(commonPrefix) + + if (this.uncheckedNodes.length == 0) { + node = this.root + } else { + node = this.uncheckedNodes[this.uncheckedNodes.length - 1].child + } + + for (var i = commonPrefix; i < word.length; i++) { + var nextNode = new lunr.TokenSet, + char = word[i] + + node.edges[char] = nextNode + + this.uncheckedNodes.push({ + parent: node, + char: char, + child: nextNode + }) + + node = nextNode + } + + node.final = true + this.previousWord = word +} + +lunr.TokenSet.Builder.prototype.finish = function () { + this.minimize(0) +} + +lunr.TokenSet.Builder.prototype.minimize = function (downTo) { + for (var i = this.uncheckedNodes.length - 1; i >= downTo; i--) { + var node = this.uncheckedNodes[i], + childKey = node.child.toString() + + if (childKey in this.minimizedNodes) { + node.parent.edges[node.char] = this.minimizedNodes[childKey] + } else { + // Cache the key for this node since + // we know it can't change anymore + node.child._str = childKey + + this.minimizedNodes[childKey] = node.child + } + + this.uncheckedNodes.pop() + } +} +/*! + * lunr.Index + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * An index contains the built index of all documents and provides a query interface + * to the index. + * + * Usually instances of lunr.Index will not be created using this constructor, instead + * lunr.Builder should be used to construct new indexes, or lunr.Index.load should be + * used to load previously built and serialized indexes. + * + * @constructor + * @param {Object} attrs - The attributes of the built search index. 
+ * @param {Object} attrs.invertedIndex - An index of term/field to document reference. + * @param {Object} attrs.fieldVectors - Field vectors + * @param {lunr.TokenSet} attrs.tokenSet - An set of all corpus tokens. + * @param {string[]} attrs.fields - The names of indexed document fields. + * @param {lunr.Pipeline} attrs.pipeline - The pipeline to use for search terms. + */ +lunr.Index = function (attrs) { + this.invertedIndex = attrs.invertedIndex + this.fieldVectors = attrs.fieldVectors + this.tokenSet = attrs.tokenSet + this.fields = attrs.fields + this.pipeline = attrs.pipeline +} + +/** + * A result contains details of a document matching a search query. + * @typedef {Object} lunr.Index~Result + * @property {string} ref - The reference of the document this result represents. + * @property {number} score - A number between 0 and 1 representing how similar this document is to the query. + * @property {lunr.MatchData} matchData - Contains metadata about this match including which term(s) caused the match. + */ + +/** + * Although lunr provides the ability to create queries using lunr.Query, it also provides a simple + * query language which itself is parsed into an instance of lunr.Query. + * + * For programmatically building queries it is advised to directly use lunr.Query, the query language + * is best used for human entered text rather than program generated text. + * + * At its simplest queries can just be a single term, e.g. `hello`, multiple terms are also supported + * and will be combined with OR, e.g `hello world` will match documents that contain either 'hello' + * or 'world', though those that contain both will rank higher in the results. + * + * Wildcards can be included in terms to match one or more unspecified characters, these wildcards can + * be inserted anywhere within the term, and more than one wildcard can exist in a single term. Adding + * wildcards will increase the number of documents that will be found but can also have a negative + * impact on query performance, especially with wildcards at the beginning of a term. + * + * Terms can be restricted to specific fields, e.g. `title:hello`, only documents with the term + * hello in the title field will match this query. Using a field not present in the index will lead + * to an error being thrown. + * + * Modifiers can also be added to terms, lunr supports edit distance and boost modifiers on terms. A term + * boost will make documents matching that term score higher, e.g. `foo^5`. Edit distance is also supported + * to provide fuzzy matching, e.g. 'hello~2' will match documents with hello with an edit distance of 2. + * Avoid large values for edit distance to improve query performance. + * + * Each term also supports a presence modifier. By default a term's presence in document is optional, however + * this can be changed to either required or prohibited. For a term's presence to be required in a document the + * term should be prefixed with a '+', e.g. `+foo bar` is a search for documents that must contain 'foo' and + * optionally contain 'bar'. Conversely a leading '-' sets the terms presence to prohibited, i.e. it must not + * appear in a document, e.g. `-foo bar` is a search for documents that do not contain 'foo' but may contain 'bar'. + * + * To escape special characters the backslash character '\' can be used, this allows searches to include + * characters that would normally be considered modifiers, e.g. 
`foo\~2` will search for a term "foo~2" instead + * of attempting to apply a boost of 2 to the search term "foo". + * + * @typedef {string} lunr.Index~QueryString + * @example Simple single term query + * hello + * @example Multiple term query + * hello world + * @example term scoped to a field + * title:hello + * @example term with a boost of 10 + * hello^10 + * @example term with an edit distance of 2 + * hello~2 + * @example terms with presence modifiers + * -foo +bar baz + */ + +/** + * Performs a search against the index using lunr query syntax. + * + * Results will be returned sorted by their score, the most relevant results + * will be returned first. For details on how the score is calculated, please see + * the {@link https://lunrjs.com/guides/searching.html#scoring|guide}. + * + * For more programmatic querying use lunr.Index#query. + * + * @param {lunr.Index~QueryString} queryString - A string containing a lunr query. + * @throws {lunr.QueryParseError} If the passed query string cannot be parsed. + * @returns {lunr.Index~Result[]} + */ +lunr.Index.prototype.search = function (queryString) { + return this.query(function (query) { + var parser = new lunr.QueryParser(queryString, query) + parser.parse() + }) +} + +/** + * A query builder callback provides a query object to be used to express + * the query to perform on the index. + * + * @callback lunr.Index~queryBuilder + * @param {lunr.Query} query - The query object to build up. + * @this lunr.Query + */ + +/** + * Performs a query against the index using the yielded lunr.Query object. + * + * If performing programmatic queries against the index, this method is preferred + * over lunr.Index#search so as to avoid the additional query parsing overhead. + * + * A query object is yielded to the supplied function which should be used to + * express the query to be run against the index. + * + * Note that although this function takes a callback parameter it is _not_ an + * asynchronous operation, the callback is just yielded a query object to be + * customized. + * + * @param {lunr.Index~queryBuilder} fn - A function that is used to build the query. + * @returns {lunr.Index~Result[]} + */ +lunr.Index.prototype.query = function (fn) { + // for each query clause + // * process terms + // * expand terms from token set + // * find matching documents and metadata + // * get document vectors + // * score documents + + var query = new lunr.Query(this.fields), + matchingFields = Object.create(null), + queryVectors = Object.create(null), + termFieldCache = Object.create(null), + requiredMatches = Object.create(null), + prohibitedMatches = Object.create(null) + + /* + * To support field level boosts a query vector is created per + * field. An empty vector is eagerly created to support negated + * queries. + */ + for (var i = 0; i < this.fields.length; i++) { + queryVectors[this.fields[i]] = new lunr.Vector + } + + fn.call(query, query) + + for (var i = 0; i < query.clauses.length; i++) { + /* + * Unless the pipeline has been disabled for this term, which is + * the case for terms with wildcards, we need to pass the clause + * term through the search pipeline. A pipeline returns an array + * of processed terms. Pipeline functions may expand the passed + * term, which means we may end up performing multiple index lookups + * for a single query term. 
+ */ + var clause = query.clauses[i], + terms = null, + clauseMatches = lunr.Set.empty + + if (clause.usePipeline) { + terms = this.pipeline.runString(clause.term, { + fields: clause.fields + }) + } else { + terms = [clause.term] + } + + for (var m = 0; m < terms.length; m++) { + var term = terms[m] + + /* + * Each term returned from the pipeline needs to use the same query + * clause object, e.g. the same boost and or edit distance. The + * simplest way to do this is to re-use the clause object but mutate + * its term property. + */ + clause.term = term + + /* + * From the term in the clause we create a token set which will then + * be used to intersect the indexes token set to get a list of terms + * to lookup in the inverted index + */ + var termTokenSet = lunr.TokenSet.fromClause(clause), + expandedTerms = this.tokenSet.intersect(termTokenSet).toArray() + + /* + * If a term marked as required does not exist in the tokenSet it is + * impossible for the search to return any matches. We set all the field + * scoped required matches set to empty and stop examining any further + * clauses. + */ + if (expandedTerms.length === 0 && clause.presence === lunr.Query.presence.REQUIRED) { + for (var k = 0; k < clause.fields.length; k++) { + var field = clause.fields[k] + requiredMatches[field] = lunr.Set.empty + } + + break + } + + for (var j = 0; j < expandedTerms.length; j++) { + /* + * For each term get the posting and termIndex, this is required for + * building the query vector. + */ + var expandedTerm = expandedTerms[j], + posting = this.invertedIndex[expandedTerm], + termIndex = posting._index + + for (var k = 0; k < clause.fields.length; k++) { + /* + * For each field that this query term is scoped by (by default + * all fields are in scope) we need to get all the document refs + * that have this term in that field. + * + * The posting is the entry in the invertedIndex for the matching + * term from above. + */ + var field = clause.fields[k], + fieldPosting = posting[field], + matchingDocumentRefs = Object.keys(fieldPosting), + termField = expandedTerm + "/" + field, + matchingDocumentsSet = new lunr.Set(matchingDocumentRefs) + + /* + * if the presence of this term is required ensure that the matching + * documents are added to the set of required matches for this clause. + * + */ + if (clause.presence == lunr.Query.presence.REQUIRED) { + clauseMatches = clauseMatches.union(matchingDocumentsSet) + + if (requiredMatches[field] === undefined) { + requiredMatches[field] = lunr.Set.complete + } + } + + /* + * if the presence of this term is prohibited ensure that the matching + * documents are added to the set of prohibited matches for this field, + * creating that set if it does not yet exist. + */ + if (clause.presence == lunr.Query.presence.PROHIBITED) { + if (prohibitedMatches[field] === undefined) { + prohibitedMatches[field] = lunr.Set.empty + } + + prohibitedMatches[field] = prohibitedMatches[field].union(matchingDocumentsSet) + + /* + * Prohibited matches should not be part of the query vector used for + * similarity scoring and no metadata should be extracted so we continue + * to the next field + */ + continue + } + + /* + * The query field vector is populated using the termIndex found for + * the term and a unit value with the appropriate boost applied. + * Using upsert because there could already be an entry in the vector + * for the term we are working with. In that case we just add the scores + * together. 
+ */ + queryVectors[field].upsert(termIndex, clause.boost, function (a, b) { return a + b }) + + /** + * If we've already seen this term, field combo then we've already collected + * the matching documents and metadata, no need to go through all that again + */ + if (termFieldCache[termField]) { + continue + } + + for (var l = 0; l < matchingDocumentRefs.length; l++) { + /* + * All metadata for this term/field/document triple + * are then extracted and collected into an instance + * of lunr.MatchData ready to be returned in the query + * results + */ + var matchingDocumentRef = matchingDocumentRefs[l], + matchingFieldRef = new lunr.FieldRef (matchingDocumentRef, field), + metadata = fieldPosting[matchingDocumentRef], + fieldMatch + + if ((fieldMatch = matchingFields[matchingFieldRef]) === undefined) { + matchingFields[matchingFieldRef] = new lunr.MatchData (expandedTerm, field, metadata) + } else { + fieldMatch.add(expandedTerm, field, metadata) + } + + } + + termFieldCache[termField] = true + } + } + } + + /** + * If the presence was required we need to update the requiredMatches field sets. + * We do this after all fields for the term have collected their matches because + * the clause terms presence is required in _any_ of the fields not _all_ of the + * fields. + */ + if (clause.presence === lunr.Query.presence.REQUIRED) { + for (var k = 0; k < clause.fields.length; k++) { + var field = clause.fields[k] + requiredMatches[field] = requiredMatches[field].intersect(clauseMatches) + } + } + } + + /** + * Need to combine the field scoped required and prohibited + * matching documents into a global set of required and prohibited + * matches + */ + var allRequiredMatches = lunr.Set.complete, + allProhibitedMatches = lunr.Set.empty + + for (var i = 0; i < this.fields.length; i++) { + var field = this.fields[i] + + if (requiredMatches[field]) { + allRequiredMatches = allRequiredMatches.intersect(requiredMatches[field]) + } + + if (prohibitedMatches[field]) { + allProhibitedMatches = allProhibitedMatches.union(prohibitedMatches[field]) + } + } + + var matchingFieldRefs = Object.keys(matchingFields), + results = [], + matches = Object.create(null) + + /* + * If the query is negated (contains only prohibited terms) + * we need to get _all_ fieldRefs currently existing in the + * index. This is only done when we know that the query is + * entirely prohibited terms to avoid any cost of getting all + * fieldRefs unnecessarily. + * + * Additionally, blank MatchData must be created to correctly + * populate the results. + */ + if (query.isNegated()) { + matchingFieldRefs = Object.keys(this.fieldVectors) + + for (var i = 0; i < matchingFieldRefs.length; i++) { + var matchingFieldRef = matchingFieldRefs[i] + var fieldRef = lunr.FieldRef.fromString(matchingFieldRef) + matchingFields[matchingFieldRef] = new lunr.MatchData + } + } + + for (var i = 0; i < matchingFieldRefs.length; i++) { + /* + * Currently we have document fields that match the query, but we + * need to return documents. The matchData and scores are combined + * from multiple fields belonging to the same document. + * + * Scores are calculated by field, using the query vectors created + * above, and combined into a final document score using addition. 
+ */ + var fieldRef = lunr.FieldRef.fromString(matchingFieldRefs[i]), + docRef = fieldRef.docRef + + if (!allRequiredMatches.contains(docRef)) { + continue + } + + if (allProhibitedMatches.contains(docRef)) { + continue + } + + var fieldVector = this.fieldVectors[fieldRef], + score = queryVectors[fieldRef.fieldName].similarity(fieldVector), + docMatch + + if ((docMatch = matches[docRef]) !== undefined) { + docMatch.score += score + docMatch.matchData.combine(matchingFields[fieldRef]) + } else { + var match = { + ref: docRef, + score: score, + matchData: matchingFields[fieldRef] + } + matches[docRef] = match + results.push(match) + } + } + + /* + * Sort the results objects by score, highest first. + */ + return results.sort(function (a, b) { + return b.score - a.score + }) +} + +/** + * Prepares the index for JSON serialization. + * + * The schema for this JSON blob will be described in a + * separate JSON schema file. + * + * @returns {Object} + */ +lunr.Index.prototype.toJSON = function () { + var invertedIndex = Object.keys(this.invertedIndex) + .sort() + .map(function (term) { + return [term, this.invertedIndex[term]] + }, this) + + var fieldVectors = Object.keys(this.fieldVectors) + .map(function (ref) { + return [ref, this.fieldVectors[ref].toJSON()] + }, this) + + return { + version: lunr.version, + fields: this.fields, + fieldVectors: fieldVectors, + invertedIndex: invertedIndex, + pipeline: this.pipeline.toJSON() + } +} + +/** + * Loads a previously serialized lunr.Index + * + * @param {Object} serializedIndex - A previously serialized lunr.Index + * @returns {lunr.Index} + */ +lunr.Index.load = function (serializedIndex) { + var attrs = {}, + fieldVectors = {}, + serializedVectors = serializedIndex.fieldVectors, + invertedIndex = Object.create(null), + serializedInvertedIndex = serializedIndex.invertedIndex, + tokenSetBuilder = new lunr.TokenSet.Builder, + pipeline = lunr.Pipeline.load(serializedIndex.pipeline) + + if (serializedIndex.version != lunr.version) { + lunr.utils.warn("Version mismatch when loading serialised index. Current version of lunr '" + lunr.version + "' does not match serialized index '" + serializedIndex.version + "'") + } + + for (var i = 0; i < serializedVectors.length; i++) { + var tuple = serializedVectors[i], + ref = tuple[0], + elements = tuple[1] + + fieldVectors[ref] = new lunr.Vector(elements) + } + + for (var i = 0; i < serializedInvertedIndex.length; i++) { + var tuple = serializedInvertedIndex[i], + term = tuple[0], + posting = tuple[1] + + tokenSetBuilder.insert(term) + invertedIndex[term] = posting + } + + tokenSetBuilder.finish() + + attrs.fields = serializedIndex.fields + + attrs.fieldVectors = fieldVectors + attrs.invertedIndex = invertedIndex + attrs.tokenSet = tokenSetBuilder.root + attrs.pipeline = pipeline + + return new lunr.Index(attrs) +} +/*! + * lunr.Builder + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * lunr.Builder performs indexing on a set of documents and + * returns instances of lunr.Index ready for querying. + * + * All configuration of the index is done via the builder, the + * fields to index, the document reference, the text processing + * pipeline and document scoring parameters are all set on the + * builder before indexing. + * + * @constructor + * @property {string} _ref - Internal reference to the document reference field. + * @property {string[]} _fields - Internal reference to the document fields to index. + * @property {object} invertedIndex - The inverted index maps terms to document fields. 
+ * @property {object} documentTermFrequencies - Keeps track of document term frequencies. + * @property {object} documentLengths - Keeps track of the length of documents added to the index. + * @property {lunr.tokenizer} tokenizer - Function for splitting strings into tokens for indexing. + * @property {lunr.Pipeline} pipeline - The pipeline performs text processing on tokens before indexing. + * @property {lunr.Pipeline} searchPipeline - A pipeline for processing search terms before querying the index. + * @property {number} documentCount - Keeps track of the total number of documents indexed. + * @property {number} _b - A parameter to control field length normalization, setting this to 0 disabled normalization, 1 fully normalizes field lengths, the default value is 0.75. + * @property {number} _k1 - A parameter to control how quickly an increase in term frequency results in term frequency saturation, the default value is 1.2. + * @property {number} termIndex - A counter incremented for each unique term, used to identify a terms position in the vector space. + * @property {array} metadataWhitelist - A list of metadata keys that have been whitelisted for entry in the index. + */ +lunr.Builder = function () { + this._ref = "id" + this._fields = Object.create(null) + this._documents = Object.create(null) + this.invertedIndex = Object.create(null) + this.fieldTermFrequencies = {} + this.fieldLengths = {} + this.tokenizer = lunr.tokenizer + this.pipeline = new lunr.Pipeline + this.searchPipeline = new lunr.Pipeline + this.documentCount = 0 + this._b = 0.75 + this._k1 = 1.2 + this.termIndex = 0 + this.metadataWhitelist = [] +} + +/** + * Sets the document field used as the document reference. Every document must have this field. + * The type of this field in the document should be a string, if it is not a string it will be + * coerced into a string by calling toString. + * + * The default ref is 'id'. + * + * The ref should _not_ be changed during indexing, it should be set before any documents are + * added to the index. Changing it during indexing can lead to inconsistent results. + * + * @param {string} ref - The name of the reference field in the document. + */ +lunr.Builder.prototype.ref = function (ref) { + this._ref = ref +} + +/** + * A function that is used to extract a field from a document. + * + * Lunr expects a field to be at the top level of a document, if however the field + * is deeply nested within a document an extractor function can be used to extract + * the right field for indexing. + * + * @callback fieldExtractor + * @param {object} doc - The document being added to the index. + * @returns {?(string|object|object[])} obj - The object that will be indexed for this field. + * @example Extracting a nested field + * function (doc) { return doc.nested.field } + */ + +/** + * Adds a field to the list of document fields that will be indexed. Every document being + * indexed should have this field. Null values for this field in indexed documents will + * not cause errors but will limit the chance of that document being retrieved by searches. + * + * All fields should be added before adding documents to the index. Adding fields after + * a document has been indexed will have no effect on already indexed documents. + * + * Fields can be boosted at build time. This allows terms within that field to have more + * importance when ranking search results. Use a field boost to specify that matches within + * one field are more important than other fields. 
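+ * For example (with illustrative names), `builder.field('title', { boost: 10 })`
+ * gives matches in the title field considerably more weight in the final score
+ * than matches in an unboosted field.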
+ * + * @param {string} fieldName - The name of a field to index in all documents. + * @param {object} attributes - Optional attributes associated with this field. + * @param {number} [attributes.boost=1] - Boost applied to all terms within this field. + * @param {fieldExtractor} [attributes.extractor] - Function to extract a field from a document. + * @throws {RangeError} fieldName cannot contain unsupported characters '/' + */ +lunr.Builder.prototype.field = function (fieldName, attributes) { + if (/\//.test(fieldName)) { + throw new RangeError ("Field '" + fieldName + "' contains illegal character '/'") + } + + this._fields[fieldName] = attributes || {} +} + +/** + * A parameter to tune the amount of field length normalisation that is applied when + * calculating relevance scores. A value of 0 will completely disable any normalisation + * and a value of 1 will fully normalise field lengths. The default is 0.75. Values of b + * will be clamped to the range 0 - 1. + * + * @param {number} number - The value to set for this tuning parameter. + */ +lunr.Builder.prototype.b = function (number) { + if (number < 0) { + this._b = 0 + } else if (number > 1) { + this._b = 1 + } else { + this._b = number + } +} + +/** + * A parameter that controls the speed at which a rise in term frequency results in term + * frequency saturation. The default value is 1.2. Setting this to a higher value will give + * slower saturation levels, a lower value will result in quicker saturation. + * + * @param {number} number - The value to set for this tuning parameter. + */ +lunr.Builder.prototype.k1 = function (number) { + this._k1 = number +} + +/** + * Adds a document to the index. + * + * Before adding fields to the index the index should have been fully setup, with the document + * ref and all fields to index already having been specified. + * + * The document must have a field name as specified by the ref (by default this is 'id') and + * it should have all fields defined for indexing, though null or undefined values will not + * cause errors. + * + * Entire documents can be boosted at build time. Applying a boost to a document indicates that + * this document should rank higher in search results than other documents. + * + * @param {object} doc - The document to add to the index. + * @param {object} attributes - Optional attributes associated with this document. + * @param {number} [attributes.boost=1] - Boost applied to all terms within this document. + */ +lunr.Builder.prototype.add = function (doc, attributes) { + var docRef = doc[this._ref], + fields = Object.keys(this._fields) + + this._documents[docRef] = attributes || {} + this.documentCount += 1 + + for (var i = 0; i < fields.length; i++) { + var fieldName = fields[i], + extractor = this._fields[fieldName].extractor, + field = extractor ? 
extractor(doc) : doc[fieldName], + tokens = this.tokenizer(field, { + fields: [fieldName] + }), + terms = this.pipeline.run(tokens), + fieldRef = new lunr.FieldRef (docRef, fieldName), + fieldTerms = Object.create(null) + + this.fieldTermFrequencies[fieldRef] = fieldTerms + this.fieldLengths[fieldRef] = 0 + + // store the length of this field for this document + this.fieldLengths[fieldRef] += terms.length + + // calculate term frequencies for this field + for (var j = 0; j < terms.length; j++) { + var term = terms[j] + + if (fieldTerms[term] == undefined) { + fieldTerms[term] = 0 + } + + fieldTerms[term] += 1 + + // add to inverted index + // create an initial posting if one doesn't exist + if (this.invertedIndex[term] == undefined) { + var posting = Object.create(null) + posting["_index"] = this.termIndex + this.termIndex += 1 + + for (var k = 0; k < fields.length; k++) { + posting[fields[k]] = Object.create(null) + } + + this.invertedIndex[term] = posting + } + + // add an entry for this term/fieldName/docRef to the invertedIndex + if (this.invertedIndex[term][fieldName][docRef] == undefined) { + this.invertedIndex[term][fieldName][docRef] = Object.create(null) + } + + // store all whitelisted metadata about this token in the + // inverted index + for (var l = 0; l < this.metadataWhitelist.length; l++) { + var metadataKey = this.metadataWhitelist[l], + metadata = term.metadata[metadataKey] + + if (this.invertedIndex[term][fieldName][docRef][metadataKey] == undefined) { + this.invertedIndex[term][fieldName][docRef][metadataKey] = [] + } + + this.invertedIndex[term][fieldName][docRef][metadataKey].push(metadata) + } + } + + } +} + +/** + * Calculates the average document length for this index + * + * @private + */ +lunr.Builder.prototype.calculateAverageFieldLengths = function () { + + var fieldRefs = Object.keys(this.fieldLengths), + numberOfFields = fieldRefs.length, + accumulator = {}, + documentsWithField = {} + + for (var i = 0; i < numberOfFields; i++) { + var fieldRef = lunr.FieldRef.fromString(fieldRefs[i]), + field = fieldRef.fieldName + + documentsWithField[field] || (documentsWithField[field] = 0) + documentsWithField[field] += 1 + + accumulator[field] || (accumulator[field] = 0) + accumulator[field] += this.fieldLengths[fieldRef] + } + + var fields = Object.keys(this._fields) + + for (var i = 0; i < fields.length; i++) { + var fieldName = fields[i] + accumulator[fieldName] = accumulator[fieldName] / documentsWithField[fieldName] + } + + this.averageFieldLength = accumulator +} + +/** + * Builds a vector space model of every document using lunr.Vector + * + * @private + */ +lunr.Builder.prototype.createFieldVectors = function () { + var fieldVectors = {}, + fieldRefs = Object.keys(this.fieldTermFrequencies), + fieldRefsLength = fieldRefs.length, + termIdfCache = Object.create(null) + + for (var i = 0; i < fieldRefsLength; i++) { + var fieldRef = lunr.FieldRef.fromString(fieldRefs[i]), + fieldName = fieldRef.fieldName, + fieldLength = this.fieldLengths[fieldRef], + fieldVector = new lunr.Vector, + termFrequencies = this.fieldTermFrequencies[fieldRef], + terms = Object.keys(termFrequencies), + termsLength = terms.length + + + var fieldBoost = this._fields[fieldName].boost || 1, + docBoost = this._documents[fieldRef.docRef].boost || 1 + + for (var j = 0; j < termsLength; j++) { + var term = terms[j], + tf = termFrequencies[term], + termIndex = this.invertedIndex[term]._index, + idf, score, scoreWithPrecision + + if (termIdfCache[term] === undefined) { + idf = 
lunr.idf(this.invertedIndex[term], this.documentCount) + termIdfCache[term] = idf + } else { + idf = termIdfCache[term] + } + + score = idf * ((this._k1 + 1) * tf) / (this._k1 * (1 - this._b + this._b * (fieldLength / this.averageFieldLength[fieldName])) + tf) + score *= fieldBoost + score *= docBoost + scoreWithPrecision = Math.round(score * 1000) / 1000 + // Converts 1.23456789 to 1.234. + // Reducing the precision so that the vectors take up less + // space when serialised. Doing it now so that they behave + // the same before and after serialisation. Also, this is + // the fastest approach to reducing a number's precision in + // JavaScript. + + fieldVector.insert(termIndex, scoreWithPrecision) + } + + fieldVectors[fieldRef] = fieldVector + } + + this.fieldVectors = fieldVectors +} + +/** + * Creates a token set of all tokens in the index using lunr.TokenSet + * + * @private + */ +lunr.Builder.prototype.createTokenSet = function () { + this.tokenSet = lunr.TokenSet.fromArray( + Object.keys(this.invertedIndex).sort() + ) +} + +/** + * Builds the index, creating an instance of lunr.Index. + * + * This completes the indexing process and should only be called + * once all documents have been added to the index. + * + * @returns {lunr.Index} + */ +lunr.Builder.prototype.build = function () { + this.calculateAverageFieldLengths() + this.createFieldVectors() + this.createTokenSet() + + return new lunr.Index({ + invertedIndex: this.invertedIndex, + fieldVectors: this.fieldVectors, + tokenSet: this.tokenSet, + fields: Object.keys(this._fields), + pipeline: this.searchPipeline + }) +} + +/** + * Applies a plugin to the index builder. + * + * A plugin is a function that is called with the index builder as its context. + * Plugins can be used to customise or extend the behaviour of the index + * in some way. A plugin is just a function, that encapsulated the custom + * behaviour that should be applied when building the index. + * + * The plugin function will be called with the index builder as its argument, additional + * arguments can also be passed when calling use. The function will be called + * with the index builder as its context. + * + * @param {Function} plugin The plugin to apply. + */ +lunr.Builder.prototype.use = function (fn) { + var args = Array.prototype.slice.call(arguments, 1) + args.unshift(this) + fn.apply(this, args) +} +/** + * Contains and collects metadata about a matching document. + * A single instance of lunr.MatchData is returned as part of every + * lunr.Index~Result. + * + * @constructor + * @param {string} term - The term this match data is associated with + * @param {string} field - The field in which the term was found + * @param {object} metadata - The metadata recorded about this term in this field + * @property {object} metadata - A cloned collection of metadata associated with this document. + * @see {@link lunr.Index~Result} + */ +lunr.MatchData = function (term, field, metadata) { + var clonedMetadata = Object.create(null), + metadataKeys = Object.keys(metadata || {}) + + // Cloning the metadata to prevent the original + // being mutated during match data combination. 
+ // Metadata is kept in an array within the inverted + // index so cloning the data can be done with + // Array#slice + for (var i = 0; i < metadataKeys.length; i++) { + var key = metadataKeys[i] + clonedMetadata[key] = metadata[key].slice() + } + + this.metadata = Object.create(null) + + if (term !== undefined) { + this.metadata[term] = Object.create(null) + this.metadata[term][field] = clonedMetadata + } +} + +/** + * An instance of lunr.MatchData will be created for every term that matches a + * document. However only one instance is required in a lunr.Index~Result. This + * method combines metadata from another instance of lunr.MatchData with this + * objects metadata. + * + * @param {lunr.MatchData} otherMatchData - Another instance of match data to merge with this one. + * @see {@link lunr.Index~Result} + */ +lunr.MatchData.prototype.combine = function (otherMatchData) { + var terms = Object.keys(otherMatchData.metadata) + + for (var i = 0; i < terms.length; i++) { + var term = terms[i], + fields = Object.keys(otherMatchData.metadata[term]) + + if (this.metadata[term] == undefined) { + this.metadata[term] = Object.create(null) + } + + for (var j = 0; j < fields.length; j++) { + var field = fields[j], + keys = Object.keys(otherMatchData.metadata[term][field]) + + if (this.metadata[term][field] == undefined) { + this.metadata[term][field] = Object.create(null) + } + + for (var k = 0; k < keys.length; k++) { + var key = keys[k] + + if (this.metadata[term][field][key] == undefined) { + this.metadata[term][field][key] = otherMatchData.metadata[term][field][key] + } else { + this.metadata[term][field][key] = this.metadata[term][field][key].concat(otherMatchData.metadata[term][field][key]) + } + + } + } + } +} + +/** + * Add metadata for a term/field pair to this instance of match data. + * + * @param {string} term - The term this match data is associated with + * @param {string} field - The field in which the term was found + * @param {object} metadata - The metadata recorded about this term in this field + */ +lunr.MatchData.prototype.add = function (term, field, metadata) { + if (!(term in this.metadata)) { + this.metadata[term] = Object.create(null) + this.metadata[term][field] = metadata + return + } + + if (!(field in this.metadata[term])) { + this.metadata[term][field] = metadata + return + } + + var metadataKeys = Object.keys(metadata) + + for (var i = 0; i < metadataKeys.length; i++) { + var key = metadataKeys[i] + + if (key in this.metadata[term][field]) { + this.metadata[term][field][key] = this.metadata[term][field][key].concat(metadata[key]) + } else { + this.metadata[term][field][key] = metadata[key] + } + } +} +/** + * A lunr.Query provides a programmatic way of defining queries to be performed + * against a {@link lunr.Index}. + * + * Prefer constructing a lunr.Query using the {@link lunr.Index#query} method + * so the query object is pre-initialized with the right index fields. + * + * @constructor + * @property {lunr.Query~Clause[]} clauses - An array of query clauses. + * @property {string[]} allFields - An array of all available fields in a lunr.Index. + */ +lunr.Query = function (allFields) { + this.clauses = [] + this.allFields = allFields +} + +/** + * Constants for indicating what kind of automatic wildcard insertion will be used when constructing a query clause. + * + * This allows wildcards to be added to the beginning and end of a term without having to manually do any string + * concatenation. 
+ * + * The wildcard constants can be bitwise combined to select both leading and trailing wildcards. + * + * @constant + * @default + * @property {number} wildcard.NONE - The term will have no wildcards inserted, this is the default behaviour + * @property {number} wildcard.LEADING - Prepend the term with a wildcard, unless a leading wildcard already exists + * @property {number} wildcard.TRAILING - Append a wildcard to the term, unless a trailing wildcard already exists + * @see lunr.Query~Clause + * @see lunr.Query#clause + * @see lunr.Query#term + * @example query term with trailing wildcard + * query.term('foo', { wildcard: lunr.Query.wildcard.TRAILING }) + * @example query term with leading and trailing wildcard + * query.term('foo', { + * wildcard: lunr.Query.wildcard.LEADING | lunr.Query.wildcard.TRAILING + * }) + */ + +lunr.Query.wildcard = new String ("*") +lunr.Query.wildcard.NONE = 0 +lunr.Query.wildcard.LEADING = 1 +lunr.Query.wildcard.TRAILING = 2 + +/** + * Constants for indicating what kind of presence a term must have in matching documents. + * + * @constant + * @enum {number} + * @see lunr.Query~Clause + * @see lunr.Query#clause + * @see lunr.Query#term + * @example query term with required presence + * query.term('foo', { presence: lunr.Query.presence.REQUIRED }) + */ +lunr.Query.presence = { + /** + * Term's presence in a document is optional, this is the default value. + */ + OPTIONAL: 1, + + /** + * Term's presence in a document is required, documents that do not contain + * this term will not be returned. + */ + REQUIRED: 2, + + /** + * Term's presence in a document is prohibited, documents that do contain + * this term will not be returned. + */ + PROHIBITED: 3 +} + +/** + * A single clause in a {@link lunr.Query} contains a term and details on how to + * match that term against a {@link lunr.Index}. + * + * @typedef {Object} lunr.Query~Clause + * @property {string[]} fields - The fields in an index this clause should be matched against. + * @property {number} [boost=1] - Any boost that should be applied when matching this clause. + * @property {number} [editDistance] - Whether the term should have fuzzy matching applied, and how fuzzy the match should be. + * @property {boolean} [usePipeline] - Whether the term should be passed through the search pipeline. + * @property {number} [wildcard=lunr.Query.wildcard.NONE] - Whether the term should have wildcards appended or prepended. + * @property {number} [presence=lunr.Query.presence.OPTIONAL] - The terms presence in any matching documents. + */ + +/** + * Adds a {@link lunr.Query~Clause} to this query. + * + * Unless the clause contains the fields to be matched all fields will be matched. In addition + * a default boost of 1 is applied to the clause. + * + * @param {lunr.Query~Clause} clause - The clause to add to this query. 
+ * @see lunr.Query~Clause + * @returns {lunr.Query} + */ +lunr.Query.prototype.clause = function (clause) { + if (!('fields' in clause)) { + clause.fields = this.allFields + } + + if (!('boost' in clause)) { + clause.boost = 1 + } + + if (!('usePipeline' in clause)) { + clause.usePipeline = true + } + + if (!('wildcard' in clause)) { + clause.wildcard = lunr.Query.wildcard.NONE + } + + if ((clause.wildcard & lunr.Query.wildcard.LEADING) && (clause.term.charAt(0) != lunr.Query.wildcard)) { + clause.term = "*" + clause.term + } + + if ((clause.wildcard & lunr.Query.wildcard.TRAILING) && (clause.term.slice(-1) != lunr.Query.wildcard)) { + clause.term = "" + clause.term + "*" + } + + if (!('presence' in clause)) { + clause.presence = lunr.Query.presence.OPTIONAL + } + + this.clauses.push(clause) + + return this +} + +/** + * A negated query is one in which every clause has a presence of + * prohibited. These queries require some special processing to return + * the expected results. + * + * @returns boolean + */ +lunr.Query.prototype.isNegated = function () { + for (var i = 0; i < this.clauses.length; i++) { + if (this.clauses[i].presence != lunr.Query.presence.PROHIBITED) { + return false + } + } + + return true +} + +/** + * Adds a term to the current query, under the covers this will create a {@link lunr.Query~Clause} + * to the list of clauses that make up this query. + * + * The term is used as is, i.e. no tokenization will be performed by this method. Instead conversion + * to a token or token-like string should be done before calling this method. + * + * The term will be converted to a string by calling `toString`. Multiple terms can be passed as an + * array, each term in the array will share the same options. + * + * @param {object|object[]} term - The term(s) to add to the query. + * @param {object} [options] - Any additional properties to add to the query clause. 
+ * @returns {lunr.Query} + * @see lunr.Query#clause + * @see lunr.Query~Clause + * @example adding a single term to a query + * query.term("foo") + * @example adding a single term to a query and specifying search fields, term boost and automatic trailing wildcard + * query.term("foo", { + * fields: ["title"], + * boost: 10, + * wildcard: lunr.Query.wildcard.TRAILING + * }) + * @example using lunr.tokenizer to convert a string to tokens before using them as terms + * query.term(lunr.tokenizer("foo bar")) + */ +lunr.Query.prototype.term = function (term, options) { + if (Array.isArray(term)) { + term.forEach(function (t) { this.term(t, lunr.utils.clone(options)) }, this) + return this + } + + var clause = options || {} + clause.term = term.toString() + + this.clause(clause) + + return this +} +lunr.QueryParseError = function (message, start, end) { + this.name = "QueryParseError" + this.message = message + this.start = start + this.end = end +} + +lunr.QueryParseError.prototype = new Error +lunr.QueryLexer = function (str) { + this.lexemes = [] + this.str = str + this.length = str.length + this.pos = 0 + this.start = 0 + this.escapeCharPositions = [] +} + +lunr.QueryLexer.prototype.run = function () { + var state = lunr.QueryLexer.lexText + + while (state) { + state = state(this) + } +} + +lunr.QueryLexer.prototype.sliceString = function () { + var subSlices = [], + sliceStart = this.start, + sliceEnd = this.pos + + for (var i = 0; i < this.escapeCharPositions.length; i++) { + sliceEnd = this.escapeCharPositions[i] + subSlices.push(this.str.slice(sliceStart, sliceEnd)) + sliceStart = sliceEnd + 1 + } + + subSlices.push(this.str.slice(sliceStart, this.pos)) + this.escapeCharPositions.length = 0 + + return subSlices.join('') +} + +lunr.QueryLexer.prototype.emit = function (type) { + this.lexemes.push({ + type: type, + str: this.sliceString(), + start: this.start, + end: this.pos + }) + + this.start = this.pos +} + +lunr.QueryLexer.prototype.escapeCharacter = function () { + this.escapeCharPositions.push(this.pos - 1) + this.pos += 1 +} + +lunr.QueryLexer.prototype.next = function () { + if (this.pos >= this.length) { + return lunr.QueryLexer.EOS + } + + var char = this.str.charAt(this.pos) + this.pos += 1 + return char +} + +lunr.QueryLexer.prototype.width = function () { + return this.pos - this.start +} + +lunr.QueryLexer.prototype.ignore = function () { + if (this.start == this.pos) { + this.pos += 1 + } + + this.start = this.pos +} + +lunr.QueryLexer.prototype.backup = function () { + this.pos -= 1 +} + +lunr.QueryLexer.prototype.acceptDigitRun = function () { + var char, charCode + + do { + char = this.next() + charCode = char.charCodeAt(0) + } while (charCode > 47 && charCode < 58) + + if (char != lunr.QueryLexer.EOS) { + this.backup() + } +} + +lunr.QueryLexer.prototype.more = function () { + return this.pos < this.length +} + +lunr.QueryLexer.EOS = 'EOS' +lunr.QueryLexer.FIELD = 'FIELD' +lunr.QueryLexer.TERM = 'TERM' +lunr.QueryLexer.EDIT_DISTANCE = 'EDIT_DISTANCE' +lunr.QueryLexer.BOOST = 'BOOST' +lunr.QueryLexer.PRESENCE = 'PRESENCE' + +lunr.QueryLexer.lexField = function (lexer) { + lexer.backup() + lexer.emit(lunr.QueryLexer.FIELD) + lexer.ignore() + return lunr.QueryLexer.lexText +} + +lunr.QueryLexer.lexTerm = function (lexer) { + if (lexer.width() > 1) { + lexer.backup() + lexer.emit(lunr.QueryLexer.TERM) + } + + lexer.ignore() + + if (lexer.more()) { + return lunr.QueryLexer.lexText + } +} + +lunr.QueryLexer.lexEditDistance = function (lexer) { + lexer.ignore() + 
lexer.acceptDigitRun() + lexer.emit(lunr.QueryLexer.EDIT_DISTANCE) + return lunr.QueryLexer.lexText +} + +lunr.QueryLexer.lexBoost = function (lexer) { + lexer.ignore() + lexer.acceptDigitRun() + lexer.emit(lunr.QueryLexer.BOOST) + return lunr.QueryLexer.lexText +} + +lunr.QueryLexer.lexEOS = function (lexer) { + if (lexer.width() > 0) { + lexer.emit(lunr.QueryLexer.TERM) + } +} + +// This matches the separator used when tokenising fields +// within a document. These should match otherwise it is +// not possible to search for some tokens within a document. +// +// It is possible for the user to change the separator on the +// tokenizer so it _might_ clash with any other of the special +// characters already used within the search string, e.g. :. +// +// This means that it is possible to change the separator in +// such a way that makes some words unsearchable using a search +// string. +lunr.QueryLexer.termSeparator = lunr.tokenizer.separator + +lunr.QueryLexer.lexText = function (lexer) { + while (true) { + var char = lexer.next() + + if (char == lunr.QueryLexer.EOS) { + return lunr.QueryLexer.lexEOS + } + + // Escape character is '\' + if (char.charCodeAt(0) == 92) { + lexer.escapeCharacter() + continue + } + + if (char == ":") { + return lunr.QueryLexer.lexField + } + + if (char == "~") { + lexer.backup() + if (lexer.width() > 0) { + lexer.emit(lunr.QueryLexer.TERM) + } + return lunr.QueryLexer.lexEditDistance + } + + if (char == "^") { + lexer.backup() + if (lexer.width() > 0) { + lexer.emit(lunr.QueryLexer.TERM) + } + return lunr.QueryLexer.lexBoost + } + + // "+" indicates term presence is required + // checking for length to ensure that only + // leading "+" are considered + if (char == "+" && lexer.width() === 1) { + lexer.emit(lunr.QueryLexer.PRESENCE) + return lunr.QueryLexer.lexText + } + + // "-" indicates term presence is prohibited + // checking for length to ensure that only + // leading "-" are considered + if (char == "-" && lexer.width() === 1) { + lexer.emit(lunr.QueryLexer.PRESENCE) + return lunr.QueryLexer.lexText + } + + if (char.match(lunr.QueryLexer.termSeparator)) { + return lunr.QueryLexer.lexTerm + } + } +} + +lunr.QueryParser = function (str, query) { + this.lexer = new lunr.QueryLexer (str) + this.query = query + this.currentClause = {} + this.lexemeIdx = 0 +} + +lunr.QueryParser.prototype.parse = function () { + this.lexer.run() + this.lexemes = this.lexer.lexemes + + var state = lunr.QueryParser.parseClause + + while (state) { + state = state(this) + } + + return this.query +} + +lunr.QueryParser.prototype.peekLexeme = function () { + return this.lexemes[this.lexemeIdx] +} + +lunr.QueryParser.prototype.consumeLexeme = function () { + var lexeme = this.peekLexeme() + this.lexemeIdx += 1 + return lexeme +} + +lunr.QueryParser.prototype.nextClause = function () { + var completedClause = this.currentClause + this.query.clause(completedClause) + this.currentClause = {} +} + +lunr.QueryParser.parseClause = function (parser) { + var lexeme = parser.peekLexeme() + + if (lexeme == undefined) { + return + } + + switch (lexeme.type) { + case lunr.QueryLexer.PRESENCE: + return lunr.QueryParser.parsePresence + case lunr.QueryLexer.FIELD: + return lunr.QueryParser.parseField + case lunr.QueryLexer.TERM: + return lunr.QueryParser.parseTerm + default: + var errorMessage = "expected either a field or a term, found " + lexeme.type + + if (lexeme.str.length >= 1) { + errorMessage += " with value '" + lexeme.str + "'" + } + + throw new lunr.QueryParseError (errorMessage, 
lexeme.start, lexeme.end) + } +} + +lunr.QueryParser.parsePresence = function (parser) { + var lexeme = parser.consumeLexeme() + + if (lexeme == undefined) { + return + } + + switch (lexeme.str) { + case "-": + parser.currentClause.presence = lunr.Query.presence.PROHIBITED + break + case "+": + parser.currentClause.presence = lunr.Query.presence.REQUIRED + break + default: + var errorMessage = "unrecognised presence operator'" + lexeme.str + "'" + throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end) + } + + var nextLexeme = parser.peekLexeme() + + if (nextLexeme == undefined) { + var errorMessage = "expecting term or field, found nothing" + throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end) + } + + switch (nextLexeme.type) { + case lunr.QueryLexer.FIELD: + return lunr.QueryParser.parseField + case lunr.QueryLexer.TERM: + return lunr.QueryParser.parseTerm + default: + var errorMessage = "expecting term or field, found '" + nextLexeme.type + "'" + throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end) + } +} + +lunr.QueryParser.parseField = function (parser) { + var lexeme = parser.consumeLexeme() + + if (lexeme == undefined) { + return + } + + if (parser.query.allFields.indexOf(lexeme.str) == -1) { + var possibleFields = parser.query.allFields.map(function (f) { return "'" + f + "'" }).join(', '), + errorMessage = "unrecognised field '" + lexeme.str + "', possible fields: " + possibleFields + + throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end) + } + + parser.currentClause.fields = [lexeme.str] + + var nextLexeme = parser.peekLexeme() + + if (nextLexeme == undefined) { + var errorMessage = "expecting term, found nothing" + throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end) + } + + switch (nextLexeme.type) { + case lunr.QueryLexer.TERM: + return lunr.QueryParser.parseTerm + default: + var errorMessage = "expecting term, found '" + nextLexeme.type + "'" + throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end) + } +} + +lunr.QueryParser.parseTerm = function (parser) { + var lexeme = parser.consumeLexeme() + + if (lexeme == undefined) { + return + } + + parser.currentClause.term = lexeme.str.toLowerCase() + + if (lexeme.str.indexOf("*") != -1) { + parser.currentClause.usePipeline = false + } + + var nextLexeme = parser.peekLexeme() + + if (nextLexeme == undefined) { + parser.nextClause() + return + } + + switch (nextLexeme.type) { + case lunr.QueryLexer.TERM: + parser.nextClause() + return lunr.QueryParser.parseTerm + case lunr.QueryLexer.FIELD: + parser.nextClause() + return lunr.QueryParser.parseField + case lunr.QueryLexer.EDIT_DISTANCE: + return lunr.QueryParser.parseEditDistance + case lunr.QueryLexer.BOOST: + return lunr.QueryParser.parseBoost + case lunr.QueryLexer.PRESENCE: + parser.nextClause() + return lunr.QueryParser.parsePresence + default: + var errorMessage = "Unexpected lexeme type '" + nextLexeme.type + "'" + throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end) + } +} + +lunr.QueryParser.parseEditDistance = function (parser) { + var lexeme = parser.consumeLexeme() + + if (lexeme == undefined) { + return + } + + var editDistance = parseInt(lexeme.str, 10) + + if (isNaN(editDistance)) { + var errorMessage = "edit distance must be numeric" + throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end) + } + + parser.currentClause.editDistance = editDistance + + var nextLexeme = parser.peekLexeme() + + if 
(nextLexeme == undefined) { + parser.nextClause() + return + } + + switch (nextLexeme.type) { + case lunr.QueryLexer.TERM: + parser.nextClause() + return lunr.QueryParser.parseTerm + case lunr.QueryLexer.FIELD: + parser.nextClause() + return lunr.QueryParser.parseField + case lunr.QueryLexer.EDIT_DISTANCE: + return lunr.QueryParser.parseEditDistance + case lunr.QueryLexer.BOOST: + return lunr.QueryParser.parseBoost + case lunr.QueryLexer.PRESENCE: + parser.nextClause() + return lunr.QueryParser.parsePresence + default: + var errorMessage = "Unexpected lexeme type '" + nextLexeme.type + "'" + throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end) + } +} + +lunr.QueryParser.parseBoost = function (parser) { + var lexeme = parser.consumeLexeme() + + if (lexeme == undefined) { + return + } + + var boost = parseInt(lexeme.str, 10) + + if (isNaN(boost)) { + var errorMessage = "boost must be numeric" + throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end) + } + + parser.currentClause.boost = boost + + var nextLexeme = parser.peekLexeme() + + if (nextLexeme == undefined) { + parser.nextClause() + return + } + + switch (nextLexeme.type) { + case lunr.QueryLexer.TERM: + parser.nextClause() + return lunr.QueryParser.parseTerm + case lunr.QueryLexer.FIELD: + parser.nextClause() + return lunr.QueryParser.parseField + case lunr.QueryLexer.EDIT_DISTANCE: + return lunr.QueryParser.parseEditDistance + case lunr.QueryLexer.BOOST: + return lunr.QueryParser.parseBoost + case lunr.QueryLexer.PRESENCE: + parser.nextClause() + return lunr.QueryParser.parsePresence + default: + var errorMessage = "Unexpected lexeme type '" + nextLexeme.type + "'" + throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end) + } +} + + /** + * export the module via AMD, CommonJS or as a browser global + * Export code from https://github.com/umdjs/umd/blob/master/returnExports.js + */ + ;(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. + define(factory) + } else if (typeof exports === 'object') { + /** + * Node. Does not work with strict CommonJS, but + * only CommonJS-like environments that support module.exports, + * like Node. + */ + module.exports = factory() + } else { + // Browser globals (root is window) + root.lunr = factory() + } + }(this, function () { + /** + * Just return a value to define the module export. + * This example returns an object, but the module + * can return a function as the exported value. + */ + return lunr + })) +})(); diff --git a/search/main.js b/search/main.js new file mode 100644 index 00000000..a5e469d7 --- /dev/null +++ b/search/main.js @@ -0,0 +1,109 @@ +function getSearchTermFromLocation() { + var sPageURL = window.location.search.substring(1); + var sURLVariables = sPageURL.split('&'); + for (var i = 0; i < sURLVariables.length; i++) { + var sParameterName = sURLVariables[i].split('='); + if (sParameterName[0] == 'q') { + return decodeURIComponent(sParameterName[1].replace(/\+/g, '%20')); + } + } +} + +function joinUrl (base, path) { + if (path.substring(0, 1) === "/") { + // path starts with `/`. Thus it is absolute. 
+ return path; + } + if (base.substring(base.length-1) === "/") { + // base ends with `/` + return base + path; + } + return base + "/" + path; +} + +function escapeHtml (value) { + return value.replace(/&/g, '&amp;') + .replace(/"/g, '&quot;') + .replace(/</g, '&lt;') + .replace(/>/g, '&gt;'); +} + +function formatResult (location, title, summary) { + return '<article><h3><a href="' + joinUrl(base_url, location) + '">' + escapeHtml(title) + '</a></h3><p>' + escapeHtml(summary) + '</p></article>'; +} + +function displayResults (results) { + var search_results = document.getElementById("mkdocs-search-results"); + while (search_results.firstChild) { + search_results.removeChild(search_results.firstChild); + } + if (results.length > 0){ + for (var i=0; i < results.length; i++){ + var result = results[i]; + var html = formatResult(result.location, result.title, result.summary); + search_results.insertAdjacentHTML('beforeend', html); + } + } else { + var noResultsText = search_results.getAttribute('data-no-results-text'); + if (!noResultsText) { + noResultsText = "No results found"; + } + search_results.insertAdjacentHTML('beforeend', '<p>' + noResultsText + '</p>
'); + } +} + +function doSearch () { + var query = document.getElementById('mkdocs-search-query').value; + if (query.length > min_search_length) { + if (!window.Worker) { + displayResults(search(query)); + } else { + searchWorker.postMessage({query: query}); + } + } else { + // Clear results for short queries + displayResults([]); + } +} + +function initSearch () { + var search_input = document.getElementById('mkdocs-search-query'); + if (search_input) { + search_input.addEventListener("keyup", doSearch); + } + var term = getSearchTermFromLocation(); + if (term) { + search_input.value = term; + doSearch(); + } +} + +function onWorkerMessage (e) { + if (e.data.allowSearch) { + initSearch(); + } else if (e.data.results) { + var results = e.data.results; + displayResults(results); + } else if (e.data.config) { + min_search_length = e.data.config.min_search_length-1; + } +} + +if (!window.Worker) { + console.log('Web Worker API not supported'); + // load index in main thread + $.getScript(joinUrl(base_url, "search/worker.js")).done(function () { + console.log('Loaded worker'); + init(); + window.postMessage = function (msg) { + onWorkerMessage({data: msg}); + }; + }).fail(function (jqxhr, settings, exception) { + console.error('Could not load worker.js'); + }); +} else { + // Wrap search in a web worker + var searchWorker = new Worker(joinUrl(base_url, "search/worker.js")); + searchWorker.postMessage({init: true}); + searchWorker.onmessage = onWorkerMessage; +} diff --git a/search/search_index.json b/search/search_index.json new file mode 100644 index 00000000..c3bf89a8 --- /dev/null +++ b/search/search_index.json @@ -0,0 +1 @@ +{"config":{"indexing":"full","lang":["en"],"min_search_length":3,"prebuild_index":false,"separator":"[\\s\\-]+"},"docs":[{"location":"","text":"Welcome to the course: Introduction to Kebnekaise \u00b6 This material Here you will find the content of the workshop \u201cIntroduction to Kebnekaise\u201d. You can download the markdown files for the presentation as well as the exercises from https://github.com/hpc2n/intro-course Click the gren \u201cCode\u201d button Either copy the url for the repo under HTTPS and do git clone https://github.com/hpc2n/intro-course.git in a terminal window OR pick \u201cDownload zip\u201d to get a zip file with the content. Some useful links: Documentation about Linux at HPC2N: https://docs.hpc2n.umu.se/tutorials/linuxguide/ Get started guide: https://docs.hpc2n.umu.se/tutorials/quickstart/ Documentation pages at HPC2N: https://docs.hpc2n.umu.se/ Prerequisites Basic knowledge about Linux (if you need a refresher, you could take the course \u201cIntroduction to Linux\u201d which runs immediately before this course. Info and registration here: https://www.hpc2n.umu.se/events/courses/2024/fall/intro-linux . An account at SUPR and at HPC2N. You should have already been contacted about getting these if you did not have them already. Content This course aims to give a brief, but comprehensive introduction to Kebnekaise. You will learn about HPC2N, HPC, and Kebnekaise hardware How to use our systems: Logging in & editors The File System The Module System Compiling and linking The Batch System Simple examples (batch system) Application examples (batch system) This course will consist of lectures and type-alongs, as well as a few exercises where you get to try out what you have just learned. 
Instructors Birgitte Bryds\u00f6, HPC2N Pedro Ojeda May, HPC2N Preliminary schedule \u00b6 Time Topic Activity 11:15 Welcome+Syllabus 11:25 Introduction to Kebnekaise and HPC2N Lecture 11:45 Logging in & editors Lecture+exercise 11:55 The File System Lecture+code along 12:15 LUNCH BREAK 13:15 The Module System Lecture+code along 13:35 Compiling, compiler tool chains Lecture+code along+exercise 13:50 The Batch System Lecture+code along 14:10 Simple Examples Lecture+exercises 14:45 COFFEE BREAK 15:00 Application Examples Lecture+code along+exercises 16:40 Questions+Summary 17:00 END OF COURSE","title":"Home"},{"location":"#welcome__to__the__course__introduction__to__kebnekaise","text":"This material Here you will find the content of the workshop \u201cIntroduction to Kebnekaise\u201d. You can download the markdown files for the presentation as well as the exercises from https://github.com/hpc2n/intro-course Click the gren \u201cCode\u201d button Either copy the url for the repo under HTTPS and do git clone https://github.com/hpc2n/intro-course.git in a terminal window OR pick \u201cDownload zip\u201d to get a zip file with the content. Some useful links: Documentation about Linux at HPC2N: https://docs.hpc2n.umu.se/tutorials/linuxguide/ Get started guide: https://docs.hpc2n.umu.se/tutorials/quickstart/ Documentation pages at HPC2N: https://docs.hpc2n.umu.se/ Prerequisites Basic knowledge about Linux (if you need a refresher, you could take the course \u201cIntroduction to Linux\u201d which runs immediately before this course. Info and registration here: https://www.hpc2n.umu.se/events/courses/2024/fall/intro-linux . An account at SUPR and at HPC2N. You should have already been contacted about getting these if you did not have them already. Content This course aims to give a brief, but comprehensive introduction to Kebnekaise. You will learn about HPC2N, HPC, and Kebnekaise hardware How to use our systems: Logging in & editors The File System The Module System Compiling and linking The Batch System Simple examples (batch system) Application examples (batch system) This course will consist of lectures and type-alongs, as well as a few exercises where you get to try out what you have just learned. Instructors Birgitte Bryds\u00f6, HPC2N Pedro Ojeda May, HPC2N","title":"Welcome to the course: Introduction to Kebnekaise"},{"location":"#preliminary__schedule","text":"Time Topic Activity 11:15 Welcome+Syllabus 11:25 Introduction to Kebnekaise and HPC2N Lecture 11:45 Logging in & editors Lecture+exercise 11:55 The File System Lecture+code along 12:15 LUNCH BREAK 13:15 The Module System Lecture+code along 13:35 Compiling, compiler tool chains Lecture+code along+exercise 13:50 The Batch System Lecture+code along 14:10 Simple Examples Lecture+exercises 14:45 COFFEE BREAK 15:00 Application Examples Lecture+code along+exercises 16:40 Questions+Summary 17:00 END OF COURSE","title":"Preliminary schedule"},{"location":"batch/","text":"Compiling and Linking with Libraries \u00b6 \\begin{block}{} \\justify \\begin{small} Figuring out how to link \\end{small} \\end{block} \\begin{block}{} \\begin{itemize} \\item Intel and Intel MKL linking: \\ \\begin{tiny} \\texttt{https://software.intel.com/en-us/articles/intel-mkl-link-line-advisor} \\end{tiny} \\item GCC, etc. 
\\textbf{Use buildenv} \\begin{itemize} \\item After loading a compiler toolchain, load \\texttt{\u2018buildenv\u2019} and use \\texttt{\u2018ml show buildenv\u2019} to get useful linking info \\item Example, foss (add relevant version): \\ \\vspace{2mm} \\texttt{ml foss/version} \\ \\texttt{ml buildenv} \\ \\texttt{ml show buildenv} \\vspace{2mm} \\item Using the environment variable (prefaced with $) for linking is highly recommended! \\item You have to load the buildenv module in order to use the environment variable for linking! \\end{itemize} \\end{itemize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)} \\begin{block}{} \\begin{itemize} \\item Large/long/parallel jobs \\textbf{must} be run through the batch system \\item SLURM is an Open Source job scheduler, which provides three key functions \\begin{itemize} \\item Keeps track of available system resources \\item Enforces local system resource usage and job scheduling policies \\item Manages a job queue, distributing work across resources according to policies \\end{itemize} \\item In order to run a batch job, you need to create and submit a SLURM submit file (also called a batch submit file, a batch script, or a job script). \\item Guides and documentation at: http://www.hpc2n.umu.se/support \\end{itemize} \\end{block} } \\frame{\\frametitle{The Batch System}\\framesubtitle{Accounting, Compute nodes, Kebnekaise} \\begin{block}{} \\begin{small} Here the Skylake nodes are used as an example. The only difference for the Broadwell nodes is that it would say 128G instead of 192G per node. \\end{small} \\end{block} \\begin{block}{} \\begin{center} \\includegraphics[width=9cm]{figures/Allocation-Kebnekaise-thin_skylake.png} \\end{center} \\end{block} } \\frame{\\frametitle{The Batch System}\\framesubtitle{Accounting, largemem nodes, Kebnekaise} \\begin{block}{} \\begin{center} \\includegraphics[width=10cm]{figures/Allocation-Kebnekaise-largemem_v3.png} \\end{center} \\end{block} } \\frame{\\frametitle{The Batch System}\\framesubtitle{Accounting, K80 GPU nodes, Kebnekaise.} \\begin{block}{} \\begin{footnotesize} The K80 GPU cards have 2 onboard compute engines (GK210 chips). Most GPU nodes have 2 K80s, placed together as 14 cores + 1 K80/socket. 4 GPU nodes have 4 K80 GPU cards. \\end{footnotesize} \\end{block} \\begin{block}{} \\begin{center} \\includegraphics[width=5.8cm]{figures/K80-GPUs.png} \\end{center} \\end{block} } \\frame{\\frametitle{The Batch System}\\framesubtitle{Accounting, V100 GPU nodes, Kebnekaise.} \\begin{block}{} \\begin{scriptsize} Each V100 GPU accelerator card has 1 onboard compute engine (GV100 chip). They are placed together as 14 cores + 1 V100 on a socket (28 cores, 2 V100s per node). \\end{scriptsize} \\end{block} \\begin{block}{} \\begin{center} \\includegraphics[width=6.8cm]{figures/V100-allocation-new.png} \\end{center} \\end{block} } \\frame{\\frametitle{The Batch System}\\framesubtitle{Accounting, A100 GPU nodes, Kebnekaise.} \\begin{block}{} \\begin{scriptsize} Each A100 GPU accelerator card has 1 onboard compute engine. The AMD Zen3 nodes have 2 CPUs sockets with 24 cores each, for a total of 48 cores, and 2 NVidia A100 GPUs. They are placed together as 24 cores + 1 A100 on a socket. 
\\end{scriptsize} \\end{block} \\begin{block}{} \\begin{center} \\includegraphics[width=6.8cm]{figures/A100-allocation.png} \\end{center} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Useful Commands} \\begin{block}{} \\begin{itemize} \\begin{footnotesize} \\item Submit job: \\texttt{sbatch \\(<\\) jobscript \\(>\\) } \\item Get list of your jobs: \\texttt{squeue -u \\(<\\) username \\(>\\) } \\item \\texttt{srun \\(<\\) commands for your job/program \\(>\\) } \\item Check on a specific job: \\texttt{scontrol show job \\(<\\) job id \\(>\\) } \\item Delete a specific job: \\texttt{scancel \\(<\\) job id \\(>\\) } \\item Delete all your own jobs: \\texttt{scancel -u \\(<\\) user \\(>\\) } \\item More detailed info about jobs: \\ \\end{footnotesize} \\begin{scriptsize} \\texttt{sacct -l -j \\(<\\) jobid \\(>\\) -o jobname,NTasks,nodelist,MaxRSS,MaxVMSize\u2026} \\end{scriptsize} \\begin{itemize} \\begin{footnotesize} \\item More flags can be found with \\texttt{man sacct} \\item The output will be \\textbf{very} wide. To view, use \\ \\texttt{sacct -l -j ....... | less -S} \\ (makes it sideways scrollable, using the left/right arrow key) \\end{footnotesize} \\end{itemize} \\begin{footnotesize} \\item Web url with graphical info about a job: \\texttt{job-usage \\(<\\) job-id \\(>\\) } \\end{footnotesize} \\end{itemize} Use \\texttt{man sbatch, man srun, man ....} for more information \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Job Output} \\begin{block}{} \\begin{itemize} \\item Output and errors in: \\ \\texttt{slurm- \\(<\\) job id \\(>\\) .out} \\item Look at it with vi, nano, emacs, cat, less\u2026 \\item To get output and error files split up, you can give these flags in the submit script: \\ \\texttt{#SBATCH \u2013error=job.\\%J.err} \\ \\texttt{#SBATCH \u2013output=job.\\%J.out} \\end{itemize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Using different parts of Kebnekaise} \\begin{block}{} \\begin{scriptsize} \\begin{itemize} \\item Use the \u2018fat\u2019 nodes by adding this flag to your script: \\ \\texttt{#SBATCH -p largemem} (separate resource) \\ \\item Specifying Intel Broadwell, Intel Skylake, or AMD Zen3 CPUs: \\ \\texttt{#SBATCH \u2013constraint=broadwell} \\ or \\ \\texttt{#SBATCH \u2013constraint=skylake} \\ or \\ \\texttt{#SBATCH \u2013constraint=zen3} \\ \\item Using the GPU nodes (separate resource): \\ \\texttt{#SBATCH \u2013gres=gpu: \\(<\\) type-of-card \\(>\\) :x} where \\(<\\) type-of-card \\(>\\) is either k80, v100, or a100 and x = 1, 2, or 4 (4 only for K80). 
\\ \\begin{itemize} \\begin{scriptsize} \\item In the case of the A100 GPU nodes, you also need to add a partition \\ \\texttt{#SBATCH -p amd_gpu} \\end{scriptsize} \\end{itemize} \\item Use the AMD login node for correct modules and compilers for AMD Zen3 and A100 nodes: \\ \\texttt{kebnekaise-amd-tl.hpc2n.umu.se} or \\\\texttt{kebnekaise-amd.hpc2n.umu.se} \\end{itemize} More on https://www.hpc2n.umu.se/documentation/guides/using_kebnekaise \\end{scriptsize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Simple example, serial} \\begin{block}{} \\justify \\begin{footnotesize} Example: Serial job on Kebnekaise, compiler toolchain \u2018foss\u2019 \\end{footnotesize} \\end{block} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{# Project id - change to your own after the course!} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Asking for 1 core} \\ \\texttt{#SBATCH -n 1} \\ \\texttt{# Asking for a walltime of 5 min} \\ \\texttt{#SBATCH \u2013time=00:05:00} \\ \\vspace{3mm} \\texttt{# Purge modules before loading new ones in a script. } \\ \\texttt{ml purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{./my_serial_program} \\end{footnotesize} \\end{block} \\begin{block}{} \\justify \\begin{footnotesize} Submit with: \\ \\texttt{sbatch \\(<\\) jobscript \\(>\\) } \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Example, MPI C program} \\begin{block}{} \\begin{footnotesize} \\texttt{#include \\(<\\) stdio.h \\(>\\) } \\ \\texttt{#include \\(<\\) mpi.h \\(>\\) } \\ \\vspace{3mm} \\texttt{int main (int argc, char *argv[]) {} \\ \\vspace{3mm} \\texttt{int myrank, size;} \\ \\vspace{3mm} \\texttt{MPI_Init(\\&argc, \\&argv);} \\ \\texttt{MPI_Comm_rank(MPI_COMM_WORLD, \\&myrank);} \\ \\texttt{MPI_Comm_size(MPI_COMM_WORLD, \\&size);} \\ \\vspace{3mm} \\texttt{printf(\u201cProcessor \\%d of \\%d: Hello World!\\textbackslash n\u201d, myrank, size);} \\ \\vspace{3mm} \\texttt{MPI_Finalize();} \\vspace{3mm} \\texttt{}} \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Simple example, parallel} \\begin{block}{} \\justify \\begin{footnotesize} Example: MPI job on Kebnekaise, compiler toolchain \u2018foss\u2019 \\end{footnotesize} \\end{block} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{#SBATCH -n 14} \\ \\texttt{#SBATCH \u2013time=00:05:00} \\ \\texttt{##SBATCH \u2013exclusive} \\ \\texttt{#SBATCH \u2013reservation=intro-gpu} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{srun ./my_parallel_program} \\end{footnotesize} \\end{block} } \\begin{frame}[fragile]\\frametitle{The Batch System (SLURM)}\\framesubtitle{Simple example, output} \\begin{block}{} \\justify Example: Output from a MPI job on Kebnekaise, run on 14 cores (one NUMA island) \\end{block} \\begin{block}{} \\begin{tiny} \\begin{verbatim} b-an01 [~/slurm]$ cat slurm-15952.out Processor 12 of 14: Hello World! Processor 5 of 14: Hello World! Processor 9 of 14: Hello World! Processor 4 of 14: Hello World! Processor 11 of 14: Hello World! Processor 13 of 14: Hello World! Processor 0 of 14: Hello World! Processor 1 of 14: Hello World! Processor 2 of 14: Hello World! Processor 3 of 14: Hello World! Processor 6 of 14: Hello World! Processor 7 of 14: Hello World! Processor 8 of 14: Hello World! Processor 10 of 14: Hello World! 
\\end{verbatim} \\end{tiny} \\end{block} \\end{frame} \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Starting more than one serial job in the same submit file} \\begin{block}{} \\begin{small} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{#SBATCH -n 5} \\ \\texttt{#SBATCH \u2013time=00:15:00} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{srun -n 1 ./job1.batch \\&} \\ \\texttt{srun -n 1 ./job2.batch \\&} \\ \\texttt{srun -n 1 ./job3.batch \\&} \\ \\texttt{srun -n 1 ./job4.batch \\&} \\ \\texttt{srun -n 1 ./job5.batch } \\ \\texttt{wait} \\ \\end{small} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Multiple Parallel Jobs Sequentially} \\begin{block}{} \\begin{scriptsize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{#SBATCH -c 28} \\ \\texttt{# Remember to ask for enough time for all jobs to complete} \\ \\texttt{#SBATCH \u2013time=02:00:00} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{# Here 14 tasks with 2 cores per task. Output to file.} \\ \\texttt{# Not needed if your job creates output in a file} \\ \\texttt{# I also copy the output somewhere else and then run} \\ \\texttt{# another executable\u2026} \\ \\vspace{3mm} \\texttt{srun -n 14 -c 2 ./a.out > myoutput1 2>\\&1} \\ \\texttt{cp myoutput1 /pfs/nobackup/home/u/username/mydatadir} \\ \\texttt{srun -n 14 -c 2 ./b.out > myoutput2 2>\\&1} \\ \\texttt{cp myoutput2 /pfs/nobackup/home/u/username/mydatadir} \\ \\texttt{srun -n 14 -c 2 ./c.out > myoutput3 2>\\&1} \\ \\texttt{cp myoutput3 /pfs/nobackup/home/u/username/mydatadir} \\ \\end{scriptsize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Multiple Parallel Jobs Simultaneously} \\[\\begin{footnotesize} Make sure you ask for enough cores that all jobs can run at the same time, and have enough memory. Of course, this will also work for serial jobs - just remove the srun from the command line. \\end{footnotesize}\\] \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Total number of cores the jobs need} \\ \\texttt{#SBATCH -n 56} \\ \\texttt{# Remember to ask for enough time for all of the jobs to} \\ \\texttt{# complete, even the longest} \\ \\texttt{#SBATCH \u2013time=02:00:00} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{srun -n 14 \u2013cpu_bind=cores ./a.out \\&} \\ \\texttt{srun -n 28 \u2013cpu_bind=cores ./b.out \\&} \\ \\texttt{srun -n 14 \u2013cpu_bind=cores ./c.out \\&} \\ \\texttt{\u2026} \\ \\texttt{wait} \\ \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{GPU Job - V100} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Expected time for job to complete} \\ \\texttt{#SBATCH \u2013time=00:10:00} \\ \\texttt{# Number of GPU cards needed. 
Here asking for 2 V100 cards} \\ \\texttt{#SBATCH \u2013gres=v100:2} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{# Change to modules needed for your program} \\ \\texttt{ml fosscuda/2021b} \\ \\vspace{3mm} \\texttt{./my-cuda-program} \\ \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{GPU Job - A100} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Expected time for job to complete} \\ \\texttt{#SBATCH \u2013time=00:10:00} \\ \\texttt{# Adding the partition for the A100 GPUs} \\ \\texttt{#SBATCH -p amd_gpu} \\ \\texttt{# Number of GPU cards needed. Here asking for 2 A100 cards} \\ \\texttt{#SBATCH \u2013gres=a100:2} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{# Change to modules needed for your software - remember to login} \\ \\texttt{# to kebnekaise-amd.hpc2n.umu.se or} \\ \\texttt{# kebnekaise-amd-tl.hpc2n.umu.se login node to see availability} \\ \\texttt{ml CUDA/11.7.0} \\ \\vspace{3mm} \\texttt{./my-cuda-program} \\ \\end{footnotesize} \\end{block} } \\frame{\\frametitle{Important information} \\begin{block}{} \\begin{itemize} \\begin{small} \\item The course project has the following project ID: hpc2n2023-132 \\item In order to use it in a batch job, add this to the batch script: \\begin{itemize} \\begin{small} \\item #SBATCH -A hpc2n2023-132 \\end{small} \\end{itemize} \\item There is a reservation with one A100 GPU node reserved for the course, in order to let us run small GPU examples without having to wait for too long. The reservation also is for one Broadwell CPU node. \\item The reservation is ONLY valid during the course: \\begin{itemize} \\begin{small} \\item intro-gpu \\ (add with #SBATCH \u2013reservation=intro-gpu) \\end{small} \\item To use the reservation with the A100 GPU node, also add \\texttt{#SBATCH -p amd_gpu} and \\texttt{#SBATCH \u2013gres=a100:x (for x=1,2)}. \\end{itemize} \\item We have a storage project linked to the compute project. It is hpc2n2023-132. You find it in /proj/nobackup/hpc2n2023-132. Remember to create your own directory under it. \\end{small} \\end{itemize} \\end{block} } \\frame{\\frametitle{Questions and support} \\begin{block}{} \\textbf{Questions?} Now: Ask me or one of the other support or application experts present. \\vspace{0.5cm} OR \\vspace{0.5cm} \\begin{itemize} \\item Documentation: \\texttt{https://www.hpc2n.umu.se/support} \\item Support questions to: \\texttt{https://supr.naiss.se/support/} or \\texttt{support@hpc2n.umu.se} \\end{itemize} \\end{block} } \\end{document}","title":"The Batch System"},{"location":"batch/#compiling__and__linking__with__libraries","text":"\\begin{block}{} \\justify \\begin{small} Figuring out how to link \\end{small} \\end{block} \\begin{block}{} \\begin{itemize} \\item Intel and Intel MKL linking: \\ \\begin{tiny} \\texttt{https://software.intel.com/en-us/articles/intel-mkl-link-line-advisor} \\end{tiny} \\item GCC, etc. \\textbf{Use buildenv} \\begin{itemize} \\item After loading a compiler toolchain, load \\texttt{\u2018buildenv\u2019} and use \\texttt{\u2018ml show buildenv\u2019} to get useful linking info \\item Example, foss (add relevant version): \\ \\vspace{2mm} \\texttt{ml foss/version} \\ \\texttt{ml buildenv} \\ \\texttt{ml show buildenv} \\vspace{2mm} \\item Using the environment variable (prefaced with $) for linking is highly recommended! 
\\item You have to load the buildenv module in order to use the environment variable for linking! \\end{itemize} \\end{itemize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)} \\begin{block}{} \\begin{itemize} \\item Large/long/parallel jobs \\textbf{must} be run through the batch system \\item SLURM is an Open Source job scheduler, which provides three key functions \\begin{itemize} \\item Keeps track of available system resources \\item Enforces local system resource usage and job scheduling policies \\item Manages a job queue, distributing work across resources according to policies \\end{itemize} \\item In order to run a batch job, you need to create and submit a SLURM submit file (also called a batch submit file, a batch script, or a job script). \\item Guides and documentation at: http://www.hpc2n.umu.se/support \\end{itemize} \\end{block} } \\frame{\\frametitle{The Batch System}\\framesubtitle{Accounting, Compute nodes, Kebnekaise} \\begin{block}{} \\begin{small} Here the Skylake nodes are used as an example. The only difference for the Broadwell nodes is that it would say 128G instead of 192G per node. \\end{small} \\end{block} \\begin{block}{} \\begin{center} \\includegraphics[width=9cm]{figures/Allocation-Kebnekaise-thin_skylake.png} \\end{center} \\end{block} } \\frame{\\frametitle{The Batch System}\\framesubtitle{Accounting, largemem nodes, Kebnekaise} \\begin{block}{} \\begin{center} \\includegraphics[width=10cm]{figures/Allocation-Kebnekaise-largemem_v3.png} \\end{center} \\end{block} } \\frame{\\frametitle{The Batch System}\\framesubtitle{Accounting, K80 GPU nodes, Kebnekaise.} \\begin{block}{} \\begin{footnotesize} The K80 GPU cards have 2 onboard compute engines (GK210 chips). Most GPU nodes have 2 K80s, placed together as 14 cores + 1 K80/socket. 4 GPU nodes have 4 K80 GPU cards. \\end{footnotesize} \\end{block} \\begin{block}{} \\begin{center} \\includegraphics[width=5.8cm]{figures/K80-GPUs.png} \\end{center} \\end{block} } \\frame{\\frametitle{The Batch System}\\framesubtitle{Accounting, V100 GPU nodes, Kebnekaise.} \\begin{block}{} \\begin{scriptsize} Each V100 GPU accelerator card has 1 onboard compute engine (GV100 chip). They are placed together as 14 cores + 1 V100 on a socket (28 cores, 2 V100s per node). \\end{scriptsize} \\end{block} \\begin{block}{} \\begin{center} \\includegraphics[width=6.8cm]{figures/V100-allocation-new.png} \\end{center} \\end{block} } \\frame{\\frametitle{The Batch System}\\framesubtitle{Accounting, A100 GPU nodes, Kebnekaise.} \\begin{block}{} \\begin{scriptsize} Each A100 GPU accelerator card has 1 onboard compute engine. The AMD Zen3 nodes have 2 CPUs sockets with 24 cores each, for a total of 48 cores, and 2 NVidia A100 GPUs. They are placed together as 24 cores + 1 A100 on a socket. 
\\end{scriptsize} \\end{block} \\begin{block}{} \\begin{center} \\includegraphics[width=6.8cm]{figures/A100-allocation.png} \\end{center} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Useful Commands} \\begin{block}{} \\begin{itemize} \\begin{footnotesize} \\item Submit job: \\texttt{sbatch \\(<\\) jobscript \\(>\\) } \\item Get list of your jobs: \\texttt{squeue -u \\(<\\) username \\(>\\) } \\item \\texttt{srun \\(<\\) commands for your job/program \\(>\\) } \\item Check on a specific job: \\texttt{scontrol show job \\(<\\) job id \\(>\\) } \\item Delete a specific job: \\texttt{scancel \\(<\\) job id \\(>\\) } \\item Delete all your own jobs: \\texttt{scancel -u \\(<\\) user \\(>\\) } \\item More detailed info about jobs: \\ \\end{footnotesize} \\begin{scriptsize} \\texttt{sacct -l -j \\(<\\) jobid \\(>\\) -o jobname,NTasks,nodelist,MaxRSS,MaxVMSize\u2026} \\end{scriptsize} \\begin{itemize} \\begin{footnotesize} \\item More flags can be found with \\texttt{man sacct} \\item The output will be \\textbf{very} wide. To view, use \\ \\texttt{sacct -l -j ....... | less -S} \\ (makes it sideways scrollable, using the left/right arrow key) \\end{footnotesize} \\end{itemize} \\begin{footnotesize} \\item Web url with graphical info about a job: \\texttt{job-usage \\(<\\) job-id \\(>\\) } \\end{footnotesize} \\end{itemize} Use \\texttt{man sbatch, man srun, man ....} for more information \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Job Output} \\begin{block}{} \\begin{itemize} \\item Output and errors in: \\ \\texttt{slurm- \\(<\\) job id \\(>\\) .out} \\item Look at it with vi, nano, emacs, cat, less\u2026 \\item To get output and error files split up, you can give these flags in the submit script: \\ \\texttt{#SBATCH \u2013error=job.\\%J.err} \\ \\texttt{#SBATCH \u2013output=job.\\%J.out} \\end{itemize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Using different parts of Kebnekaise} \\begin{block}{} \\begin{scriptsize} \\begin{itemize} \\item Use the \u2018fat\u2019 nodes by adding this flag to your script: \\ \\texttt{#SBATCH -p largemem} (separate resource) \\ \\item Specifying Intel Broadwell, Intel Skylake, or AMD Zen3 CPUs: \\ \\texttt{#SBATCH \u2013constraint=broadwell} \\ or \\ \\texttt{#SBATCH \u2013constraint=skylake} \\ or \\ \\texttt{#SBATCH \u2013constraint=zen3} \\ \\item Using the GPU nodes (separate resource): \\ \\texttt{#SBATCH \u2013gres=gpu: \\(<\\) type-of-card \\(>\\) :x} where \\(<\\) type-of-card \\(>\\) is either k80, v100, or a100 and x = 1, 2, or 4 (4 only for K80). 
\\ \\begin{itemize} \\begin{scriptsize} \\item In the case of the A100 GPU nodes, you also need to add a partition \\ \\texttt{#SBATCH -p amd_gpu} \\end{scriptsize} \\end{itemize} \\item Use the AMD login node for correct modules and compilers for AMD Zen3 and A100 nodes: \\ \\texttt{kebnekaise-amd-tl.hpc2n.umu.se} or \\\\texttt{kebnekaise-amd.hpc2n.umu.se} \\end{itemize} More on https://www.hpc2n.umu.se/documentation/guides/using_kebnekaise \\end{scriptsize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Simple example, serial} \\begin{block}{} \\justify \\begin{footnotesize} Example: Serial job on Kebnekaise, compiler toolchain \u2018foss\u2019 \\end{footnotesize} \\end{block} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{# Project id - change to your own after the course!} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Asking for 1 core} \\ \\texttt{#SBATCH -n 1} \\ \\texttt{# Asking for a walltime of 5 min} \\ \\texttt{#SBATCH \u2013time=00:05:00} \\ \\vspace{3mm} \\texttt{# Purge modules before loading new ones in a script. } \\ \\texttt{ml purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{./my_serial_program} \\end{footnotesize} \\end{block} \\begin{block}{} \\justify \\begin{footnotesize} Submit with: \\ \\texttt{sbatch \\(<\\) jobscript \\(>\\) } \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Example, MPI C program} \\begin{block}{} \\begin{footnotesize} \\texttt{#include \\(<\\) stdio.h \\(>\\) } \\ \\texttt{#include \\(<\\) mpi.h \\(>\\) } \\ \\vspace{3mm} \\texttt{int main (int argc, char *argv[]) {} \\ \\vspace{3mm} \\texttt{int myrank, size;} \\ \\vspace{3mm} \\texttt{MPI_Init(\\&argc, \\&argv);} \\ \\texttt{MPI_Comm_rank(MPI_COMM_WORLD, \\&myrank);} \\ \\texttt{MPI_Comm_size(MPI_COMM_WORLD, \\&size);} \\ \\vspace{3mm} \\texttt{printf(\u201cProcessor \\%d of \\%d: Hello World!\\textbackslash n\u201d, myrank, size);} \\ \\vspace{3mm} \\texttt{MPI_Finalize();} \\vspace{3mm} \\texttt{}} \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Simple example, parallel} \\begin{block}{} \\justify \\begin{footnotesize} Example: MPI job on Kebnekaise, compiler toolchain \u2018foss\u2019 \\end{footnotesize} \\end{block} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{#SBATCH -n 14} \\ \\texttt{#SBATCH \u2013time=00:05:00} \\ \\texttt{##SBATCH \u2013exclusive} \\ \\texttt{#SBATCH \u2013reservation=intro-gpu} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{srun ./my_parallel_program} \\end{footnotesize} \\end{block} } \\begin{frame}[fragile]\\frametitle{The Batch System (SLURM)}\\framesubtitle{Simple example, output} \\begin{block}{} \\justify Example: Output from a MPI job on Kebnekaise, run on 14 cores (one NUMA island) \\end{block} \\begin{block}{} \\begin{tiny} \\begin{verbatim} b-an01 [~/slurm]$ cat slurm-15952.out Processor 12 of 14: Hello World! Processor 5 of 14: Hello World! Processor 9 of 14: Hello World! Processor 4 of 14: Hello World! Processor 11 of 14: Hello World! Processor 13 of 14: Hello World! Processor 0 of 14: Hello World! Processor 1 of 14: Hello World! Processor 2 of 14: Hello World! Processor 3 of 14: Hello World! Processor 6 of 14: Hello World! Processor 7 of 14: Hello World! Processor 8 of 14: Hello World! Processor 10 of 14: Hello World! 
\\end{verbatim} \\end{tiny} \\end{block} \\end{frame} \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Starting more than one serial job in the same submit file} \\begin{block}{} \\begin{small} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{#SBATCH -n 5} \\ \\texttt{#SBATCH \u2013time=00:15:00} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{srun -n 1 ./job1.batch \\&} \\ \\texttt{srun -n 1 ./job2.batch \\&} \\ \\texttt{srun -n 1 ./job3.batch \\&} \\ \\texttt{srun -n 1 ./job4.batch \\&} \\ \\texttt{srun -n 1 ./job5.batch } \\ \\texttt{wait} \\ \\end{small} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Multiple Parallel Jobs Sequentially} \\begin{block}{} \\begin{scriptsize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{#SBATCH -c 28} \\ \\texttt{# Remember to ask for enough time for all jobs to complete} \\ \\texttt{#SBATCH \u2013time=02:00:00} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{# Here 14 tasks with 2 cores per task. Output to file.} \\ \\texttt{# Not needed if your job creates output in a file} \\ \\texttt{# I also copy the output somewhere else and then run} \\ \\texttt{# another executable\u2026} \\ \\vspace{3mm} \\texttt{srun -n 14 -c 2 ./a.out > myoutput1 2>\\&1} \\ \\texttt{cp myoutput1 /pfs/nobackup/home/u/username/mydatadir} \\ \\texttt{srun -n 14 -c 2 ./b.out > myoutput2 2>\\&1} \\ \\texttt{cp myoutput2 /pfs/nobackup/home/u/username/mydatadir} \\ \\texttt{srun -n 14 -c 2 ./c.out > myoutput3 2>\\&1} \\ \\texttt{cp myoutput3 /pfs/nobackup/home/u/username/mydatadir} \\ \\end{scriptsize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Multiple Parallel Jobs Simultaneously} \\[\\begin{footnotesize} Make sure you ask for enough cores that all jobs can run at the same time, and have enough memory. Of course, this will also work for serial jobs - just remove the srun from the command line. \\end{footnotesize}\\] \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Total number of cores the jobs need} \\ \\texttt{#SBATCH -n 56} \\ \\texttt{# Remember to ask for enough time for all of the jobs to} \\ \\texttt{# complete, even the longest} \\ \\texttt{#SBATCH \u2013time=02:00:00} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{srun -n 14 \u2013cpu_bind=cores ./a.out \\&} \\ \\texttt{srun -n 28 \u2013cpu_bind=cores ./b.out \\&} \\ \\texttt{srun -n 14 \u2013cpu_bind=cores ./c.out \\&} \\ \\texttt{\u2026} \\ \\texttt{wait} \\ \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{GPU Job - V100} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Expected time for job to complete} \\ \\texttt{#SBATCH \u2013time=00:10:00} \\ \\texttt{# Number of GPU cards needed. 
Here asking for 2 V100 cards} \\ \\texttt{#SBATCH \u2013gres=v100:2} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{# Change to modules needed for your program} \\ \\texttt{ml fosscuda/2021b} \\ \\vspace{3mm} \\texttt{./my-cuda-program} \\ \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{GPU Job - A100} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Expected time for job to complete} \\ \\texttt{#SBATCH \u2013time=00:10:00} \\ \\texttt{# Adding the partition for the A100 GPUs} \\ \\texttt{#SBATCH -p amd_gpu} \\ \\texttt{# Number of GPU cards needed. Here asking for 2 A100 cards} \\ \\texttt{#SBATCH \u2013gres=a100:2} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{# Change to modules needed for your software - remember to login} \\ \\texttt{# to kebnekaise-amd.hpc2n.umu.se or} \\ \\texttt{# kebnekaise-amd-tl.hpc2n.umu.se login node to see availability} \\ \\texttt{ml CUDA/11.7.0} \\ \\vspace{3mm} \\texttt{./my-cuda-program} \\ \\end{footnotesize} \\end{block} } \\frame{\\frametitle{Important information} \\begin{block}{} \\begin{itemize} \\begin{small} \\item The course project has the following project ID: hpc2n2023-132 \\item In order to use it in a batch job, add this to the batch script: \\begin{itemize} \\begin{small} \\item #SBATCH -A hpc2n2023-132 \\end{small} \\end{itemize} \\item There is a reservation with one A100 GPU node reserved for the course, in order to let us run small GPU examples without having to wait for too long. The reservation also is for one Broadwell CPU node. \\item The reservation is ONLY valid during the course: \\begin{itemize} \\begin{small} \\item intro-gpu \\ (add with #SBATCH \u2013reservation=intro-gpu) \\end{small} \\item To use the reservation with the A100 GPU node, also add \\texttt{#SBATCH -p amd_gpu} and \\texttt{#SBATCH \u2013gres=a100:x (for x=1,2)}. \\end{itemize} \\item We have a storage project linked to the compute project. It is hpc2n2023-132. You find it in /proj/nobackup/hpc2n2023-132. Remember to create your own directory under it. \\end{small} \\end{itemize} \\end{block} } \\frame{\\frametitle{Questions and support} \\begin{block}{} \\textbf{Questions?} Now: Ask me or one of the other support or application experts present. \\vspace{0.5cm} OR \\vspace{0.5cm} \\begin{itemize} \\item Documentation: \\texttt{https://www.hpc2n.umu.se/support} \\item Support questions to: \\texttt{https://supr.naiss.se/support/} or \\texttt{support@hpc2n.umu.se} \\end{itemize} \\end{block} } \\end{document}","title":"Compiling and Linking with Libraries"},{"location":"compilers/","text":"Compiling and Linking with Libraries \u00b6 Objectives Learn about the compilers at HPC2N How to load the compiler toolchains How to use the compilers What are the popular flags How to link with libraries. Installed compilers \u00b6 There are compilers available for Fortran 77, Fortran 90, Fortran 95, C, and C++. The compilers can produce both general-purpose code and architecture-specific optimized code to improve performance (loop-level optimizations, inter-procedural analysis and cache optimizations). Loading compilers \u00b6 Note You need to load a compiler suite (and possibly libraries, depending on what you need) before you can compile and link. Use ml av to get a list of available compiler toolchains as mentioned in the modules - compiler toolchains section. 
You load a compiler toolchain the same way you load any other module. They are always available directly, without the need to load prerequisites first. Example: Loading foss/2023b This compiler toolchain contains: GCC/13.2.0 , BLAS (with LAPACK , ScaLAPACK , and FFTW . b-an01 [ ~ ] $ ml foss/2023b b-an01 [ ~ ] $ ml Currently Loaded Modules: 1 ) snicenvironment ( S ) 7 ) numactl/2.0.16 13 ) libevent/2.1.12 19 ) FlexiBLAS/3.3.1 2 ) systemdefault ( S ) 8 ) XZ/5.4.4 14 ) UCX/1.15.0 20 ) FFTW/3.3.10 3 ) GCCcore/13.2.0 9 ) libxml2/2.11.5 15 ) PMIx/4.2.6 21 ) FFTW.MPI/3.3.10 4 ) zlib/1.2.13 10 ) libpciaccess/0.17 16 ) UCC/1.2.0 22 ) ScaLAPACK/2.2.0-fb 5 ) binutils/2.40 11 ) hwloc/2.9.2 17 ) OpenMPI/4.1.6 23 ) foss/2023b 6 ) GCC/13.2.0 12 ) OpenSSL/1.1 18 ) OpenBLAS/0.3.24 Where: S: Module is Sticky, requires --force to unload or purge b-an01 [ ~ ] $ Compiling \u00b6 Compiling with GCC \u00b6 Language Compiler name MPI Fortran77 gfortran mpif77 Fortran90 gfortran mpif90 Fortran95 gfortran N/A C gcc mpicc C++ g++ mpiCC Example: compiling hello.c You can find the file hello.c in the exercises directory, in the subdirectory \u201csimple\u201d. Or you can download it here: \\) } \\item Get list of your jobs: \\texttt{squeue -u \\(<\\) username \\(>\\) } \\item \\texttt{srun \\(<\\) commands for your job/program \\(>\\) } \\item Check on a specific job: \\texttt{scontrol show job \\(<\\) job id \\(>\\) } \\item Delete a specific job: \\texttt{scancel \\(<\\) job id \\(>\\) } \\item Delete all your own jobs: \\texttt{scancel -u \\(<\\) user \\(>\\) } \\item More detailed info about jobs: \\ \\end{footnotesize} \\begin{scriptsize} \\texttt{sacct -l -j \\(<\\) jobid \\(>\\) -o jobname,NTasks,nodelist,MaxRSS,MaxVMSize\u2026} \\end{scriptsize} \\begin{itemize} \\begin{footnotesize} \\item More flags can be found with \\texttt{man sacct} \\item The output will be \\textbf{very} wide. To view, use \\ \\texttt{sacct -l -j ....... | less -S} \\ (makes it sideways scrollable, using the left/right arrow key) \\end{footnotesize} \\end{itemize} \\begin{footnotesize} \\item Web url with graphical info about a job: \\texttt{job-usage \\(<\\) job-id \\(>\\) } \\end{footnotesize} \\end{itemize} Use \\texttt{man sbatch, man srun, man ....} for more information \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Job Output} \\begin{block}{} \\begin{itemize} \\item Output and errors in: \\ \\texttt{slurm- \\(<\\) job id \\(>\\) .out} \\item Look at it with vi, nano, emacs, cat, less\u2026 \\item To get output and error files split up, you can give these flags in the submit script: \\ \\texttt{#SBATCH \u2013error=job.\\%J.err} \\ \\texttt{#SBATCH \u2013output=job.\\%J.out} \\end{itemize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Using different parts of Kebnekaise} \\begin{block}{} \\begin{scriptsize} \\begin{itemize} \\item Use the \u2018fat\u2019 nodes by adding this flag to your script: \\ \\texttt{#SBATCH -p largemem} (separate resource) \\ \\item Specifying Intel Broadwell, Intel Skylake, or AMD Zen3 CPUs: \\ \\texttt{#SBATCH \u2013constraint=broadwell} \\ or \\ \\texttt{#SBATCH \u2013constraint=skylake} \\ or \\ \\texttt{#SBATCH \u2013constraint=zen3} \\ \\item Using the GPU nodes (separate resource): \\ \\texttt{#SBATCH \u2013gres=gpu: \\(<\\) type-of-card \\(>\\) :x} where \\(<\\) type-of-card \\(>\\) is either k80, v100, or a100 and x = 1, 2, or 4 (4 only for K80). 
\\ \\begin{itemize} \\begin{scriptsize} \\item In the case of the A100 GPU nodes, you also need to add a partition \\ \\texttt{#SBATCH -p amd_gpu} \\end{scriptsize} \\end{itemize} \\item Use the AMD login node for correct modules and compilers for AMD Zen3 and A100 nodes: \\ \\texttt{kebnekaise-amd-tl.hpc2n.umu.se} or \\\\texttt{kebnekaise-amd.hpc2n.umu.se} \\end{itemize} More on https://www.hpc2n.umu.se/documentation/guides/using_kebnekaise \\end{scriptsize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Simple example, serial} \\begin{block}{} \\justify \\begin{footnotesize} Example: Serial job on Kebnekaise, compiler toolchain \u2018foss\u2019 \\end{footnotesize} \\end{block} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{# Project id - change to your own after the course!} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Asking for 1 core} \\ \\texttt{#SBATCH -n 1} \\ \\texttt{# Asking for a walltime of 5 min} \\ \\texttt{#SBATCH \u2013time=00:05:00} \\ \\vspace{3mm} \\texttt{# Purge modules before loading new ones in a script. } \\ \\texttt{ml purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{./my_serial_program} \\end{footnotesize} \\end{block} \\begin{block}{} \\justify \\begin{footnotesize} Submit with: \\ \\texttt{sbatch \\(<\\) jobscript \\(>\\) } \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Example, MPI C program} \\begin{block}{} \\begin{footnotesize} \\texttt{#include \\(<\\) stdio.h \\(>\\) } \\ \\texttt{#include \\(<\\) mpi.h \\(>\\) } \\ \\vspace{3mm} \\texttt{int main (int argc, char *argv[]) {} \\ \\vspace{3mm} \\texttt{int myrank, size;} \\ \\vspace{3mm} \\texttt{MPI_Init(\\&argc, \\&argv);} \\ \\texttt{MPI_Comm_rank(MPI_COMM_WORLD, \\&myrank);} \\ \\texttt{MPI_Comm_size(MPI_COMM_WORLD, \\&size);} \\ \\vspace{3mm} \\texttt{printf(\u201cProcessor \\%d of \\%d: Hello World!\\textbackslash n\u201d, myrank, size);} \\ \\vspace{3mm} \\texttt{MPI_Finalize();} \\vspace{3mm} \\texttt{}} \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Simple example, parallel} \\begin{block}{} \\justify \\begin{footnotesize} Example: MPI job on Kebnekaise, compiler toolchain \u2018foss\u2019 \\end{footnotesize} \\end{block} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{#SBATCH -n 14} \\ \\texttt{#SBATCH \u2013time=00:05:00} \\ \\texttt{##SBATCH \u2013exclusive} \\ \\texttt{#SBATCH \u2013reservation=intro-gpu} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{srun ./my_parallel_program} \\end{footnotesize} \\end{block} } \\begin{frame}[fragile]\\frametitle{The Batch System (SLURM)}\\framesubtitle{Simple example, output} \\begin{block}{} \\justify Example: Output from a MPI job on Kebnekaise, run on 14 cores (one NUMA island) \\end{block} \\begin{block}{} \\begin{tiny} \\begin{verbatim} b-an01 [~/slurm]$ cat slurm-15952.out Processor 12 of 14: Hello World! Processor 5 of 14: Hello World! Processor 9 of 14: Hello World! Processor 4 of 14: Hello World! Processor 11 of 14: Hello World! Processor 13 of 14: Hello World! Processor 0 of 14: Hello World! Processor 1 of 14: Hello World! Processor 2 of 14: Hello World! Processor 3 of 14: Hello World! Processor 6 of 14: Hello World! Processor 7 of 14: Hello World! Processor 8 of 14: Hello World! Processor 10 of 14: Hello World! 
\\end{verbatim} \\end{tiny} \\end{block} \\end{frame} \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Starting more than one serial job in the same submit file} \\begin{block}{} \\begin{small} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{#SBATCH -n 5} \\ \\texttt{#SBATCH \u2013time=00:15:00} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{srun -n 1 ./job1.batch \\&} \\ \\texttt{srun -n 1 ./job2.batch \\&} \\ \\texttt{srun -n 1 ./job3.batch \\&} \\ \\texttt{srun -n 1 ./job4.batch \\&} \\ \\texttt{srun -n 1 ./job5.batch } \\ \\texttt{wait} \\ \\end{small} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Multiple Parallel Jobs Sequentially} \\begin{block}{} \\begin{scriptsize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{#SBATCH -c 28} \\ \\texttt{# Remember to ask for enough time for all jobs to complete} \\ \\texttt{#SBATCH \u2013time=02:00:00} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{# Here 14 tasks with 2 cores per task. Output to file.} \\ \\texttt{# Not needed if your job creates output in a file} \\ \\texttt{# I also copy the output somewhere else and then run} \\ \\texttt{# another executable\u2026} \\ \\vspace{3mm} \\texttt{srun -n 14 -c 2 ./a.out > myoutput1 2>\\&1} \\ \\texttt{cp myoutput1 /pfs/nobackup/home/u/username/mydatadir} \\ \\texttt{srun -n 14 -c 2 ./b.out > myoutput2 2>\\&1} \\ \\texttt{cp myoutput2 /pfs/nobackup/home/u/username/mydatadir} \\ \\texttt{srun -n 14 -c 2 ./c.out > myoutput3 2>\\&1} \\ \\texttt{cp myoutput3 /pfs/nobackup/home/u/username/mydatadir} \\ \\end{scriptsize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Multiple Parallel Jobs Simultaneously} \\[\\begin{footnotesize} Make sure you ask for enough cores that all jobs can run at the same time, and have enough memory. Of course, this will also work for serial jobs - just remove the srun from the command line. \\end{footnotesize}\\] \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Total number of cores the jobs need} \\ \\texttt{#SBATCH -n 56} \\ \\texttt{# Remember to ask for enough time for all of the jobs to} \\ \\texttt{# complete, even the longest} \\ \\texttt{#SBATCH \u2013time=02:00:00} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{srun -n 14 \u2013cpu_bind=cores ./a.out \\&} \\ \\texttt{srun -n 28 \u2013cpu_bind=cores ./b.out \\&} \\ \\texttt{srun -n 14 \u2013cpu_bind=cores ./c.out \\&} \\ \\texttt{\u2026} \\ \\texttt{wait} \\ \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{GPU Job - V100} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Expected time for job to complete} \\ \\texttt{#SBATCH \u2013time=00:10:00} \\ \\texttt{# Number of GPU cards needed. 
Here asking for 2 V100 cards} \\ \\texttt{#SBATCH \u2013gres=v100:2} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{# Change to modules needed for your program} \\ \\texttt{ml fosscuda/2021b} \\ \\vspace{3mm} \\texttt{./my-cuda-program} \\ \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{GPU Job - A100} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Expected time for job to complete} \\ \\texttt{#SBATCH \u2013time=00:10:00} \\ \\texttt{# Adding the partition for the A100 GPUs} \\ \\texttt{#SBATCH -p amd_gpu} \\ \\texttt{# Number of GPU cards needed. Here asking for 2 A100 cards} \\ \\texttt{#SBATCH \u2013gres=a100:2} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{# Change to modules needed for your software - remember to login} \\ \\texttt{# to kebnekaise-amd.hpc2n.umu.se or} \\ \\texttt{# kebnekaise-amd-tl.hpc2n.umu.se login node to see availability} \\ \\texttt{ml CUDA/11.7.0} \\ \\vspace{3mm} \\texttt{./my-cuda-program} \\ \\end{footnotesize} \\end{block} } \\frame{\\frametitle{Important information} \\begin{block}{} \\begin{itemize} \\begin{small} \\item The course project has the following project ID: hpc2n2023-132 \\item In order to use it in a batch job, add this to the batch script: \\begin{itemize} \\begin{small} \\item #SBATCH -A hpc2n2023-132 \\end{small} \\end{itemize} \\item There is a reservation with one A100 GPU node reserved for the course, in order to let us run small GPU examples without having to wait for too long. The reservation also is for one Broadwell CPU node. \\item The reservation is ONLY valid during the course: \\begin{itemize} \\begin{small} \\item intro-gpu \\ (add with #SBATCH \u2013reservation=intro-gpu) \\end{small} \\item To use the reservation with the A100 GPU node, also add \\texttt{#SBATCH -p amd_gpu} and \\texttt{#SBATCH \u2013gres=a100:x (for x=1,2)}. \\end{itemize} \\item We have a storage project linked to the compute project. It is hpc2n2023-132. You find it in /proj/nobackup/hpc2n2023-132. Remember to create your own directory under it. \\end{small} \\end{itemize} \\end{block} } \\frame{\\frametitle{Questions and support} \\begin{block}{} \\textbf{Questions?} Now: Ask me or one of the other support or application experts present. \\vspace{0.5cm} OR \\vspace{0.5cm} \\begin{itemize} \\item Documentation: \\texttt{https://www.hpc2n.umu.se/support} \\item Support questions to: \\texttt{https://supr.naiss.se/support/} or \\texttt{support@hpc2n.umu.se} \\end{itemize} \\end{block} } \\end{document}","title":"Compiling"},{"location":"compilers/#compiling__and__linking__with__libraries","text":"Objectives Learn about the compilers at HPC2N How to load the compiler toolchains How to use the compilers What are the popular flags How to link with libraries.","title":"Compiling and Linking with Libraries"},{"location":"compilers/#installed__compilers","text":"There are compilers available for Fortran 77, Fortran 90, Fortran 95, C, and C++. The compilers can produce both general-purpose code and architecture-specific optimized code to improve performance (loop-level optimizations, inter-procedural analysis and cache optimizations).","title":"Installed compilers"},{"location":"compilers/#loading__compilers","text":"Note You need to load a compiler suite (and possibly libraries, depending on what you need) before you can compile and link. 
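As a minimal sketch of that workflow, assuming the foss toolchain used in the course material (hello.c is the course example file; mpi_hello.c and the output names are illustrative):

```bash
# Load a compiler toolchain first
ml foss/2023b

# Compile and run a serial C program
gcc -o hello hello.c
./hello

# Compile an MPI C program with the compiler wrapper provided by the toolchain
mpicc -o mpi_hello mpi_hello.c
```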
Use ml av to get a list of available compiler toolchains as mentioned in the modules - compiler toolchains section. You load a compiler toolchain the same way you load any other module. They are always available directly, without the need to load prerequisites first. Example: Loading foss/2023b This compiler toolchain contains: GCC/13.2.0 , BLAS (with LAPACK , ScaLAPACK , and FFTW . b-an01 [ ~ ] $ ml foss/2023b b-an01 [ ~ ] $ ml Currently Loaded Modules: 1 ) snicenvironment ( S ) 7 ) numactl/2.0.16 13 ) libevent/2.1.12 19 ) FlexiBLAS/3.3.1 2 ) systemdefault ( S ) 8 ) XZ/5.4.4 14 ) UCX/1.15.0 20 ) FFTW/3.3.10 3 ) GCCcore/13.2.0 9 ) libxml2/2.11.5 15 ) PMIx/4.2.6 21 ) FFTW.MPI/3.3.10 4 ) zlib/1.2.13 10 ) libpciaccess/0.17 16 ) UCC/1.2.0 22 ) ScaLAPACK/2.2.0-fb 5 ) binutils/2.40 11 ) hwloc/2.9.2 17 ) OpenMPI/4.1.6 23 ) foss/2023b 6 ) GCC/13.2.0 12 ) OpenSSL/1.1 18 ) OpenBLAS/0.3.24 Where: S: Module is Sticky, requires --force to unload or purge b-an01 [ ~ ] $","title":"Loading compilers"},{"location":"compilers/#compiling","text":"","title":"Compiling"},{"location":"compilers/#compiling__with__gcc","text":"Language Compiler name MPI Fortran77 gfortran mpif77 Fortran90 gfortran mpif90 Fortran95 gfortran N/A C gcc mpicc C++ g++ mpiCC Example: compiling hello.c You can find the file hello.c in the exercises directory, in the subdirectory \u201csimple\u201d. Or you can download it here: \\) } \\item Get list of your jobs: \\texttt{squeue -u \\(<\\) username \\(>\\) } \\item \\texttt{srun \\(<\\) commands for your job/program \\(>\\) } \\item Check on a specific job: \\texttt{scontrol show job \\(<\\) job id \\(>\\) } \\item Delete a specific job: \\texttt{scancel \\(<\\) job id \\(>\\) } \\item Delete all your own jobs: \\texttt{scancel -u \\(<\\) user \\(>\\) } \\item More detailed info about jobs: \\ \\end{footnotesize} \\begin{scriptsize} \\texttt{sacct -l -j \\(<\\) jobid \\(>\\) -o jobname,NTasks,nodelist,MaxRSS,MaxVMSize\u2026} \\end{scriptsize} \\begin{itemize} \\begin{footnotesize} \\item More flags can be found with \\texttt{man sacct} \\item The output will be \\textbf{very} wide. To view, use \\ \\texttt{sacct -l -j ....... 
| less -S} \\ (makes it sideways scrollable, using the left/right arrow key) \\end{footnotesize} \\end{itemize} \\begin{footnotesize} \\item Web url with graphical info about a job: \\texttt{job-usage \\(<\\) job-id \\(>\\) } \\end{footnotesize} \\end{itemize} Use \\texttt{man sbatch, man srun, man ....} for more information \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Job Output} \\begin{block}{} \\begin{itemize} \\item Output and errors in: \\ \\texttt{slurm- \\(<\\) job id \\(>\\) .out} \\item Look at it with vi, nano, emacs, cat, less\u2026 \\item To get output and error files split up, you can give these flags in the submit script: \\ \\texttt{#SBATCH \u2013error=job.\\%J.err} \\ \\texttt{#SBATCH \u2013output=job.\\%J.out} \\end{itemize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Using different parts of Kebnekaise} \\begin{block}{} \\begin{scriptsize} \\begin{itemize} \\item Use the \u2018fat\u2019 nodes by adding this flag to your script: \\ \\texttt{#SBATCH -p largemem} (separate resource) \\ \\item Specifying Intel Broadwell, Intel Skylake, or AMD Zen3 CPUs: \\ \\texttt{#SBATCH \u2013constraint=broadwell} \\ or \\ \\texttt{#SBATCH \u2013constraint=skylake} \\ or \\ \\texttt{#SBATCH \u2013constraint=zen3} \\ \\item Using the GPU nodes (separate resource): \\ \\texttt{#SBATCH \u2013gres=gpu: \\(<\\) type-of-card \\(>\\) :x} where \\(<\\) type-of-card \\(>\\) is either k80, v100, or a100 and x = 1, 2, or 4 (4 only for K80). \\ \\begin{itemize} \\begin{scriptsize} \\item In the case of the A100 GPU nodes, you also need to add a partition \\ \\texttt{#SBATCH -p amd_gpu} \\end{scriptsize} \\end{itemize} \\item Use the AMD login node for correct modules and compilers for AMD Zen3 and A100 nodes: \\ \\texttt{kebnekaise-amd-tl.hpc2n.umu.se} or \\\\texttt{kebnekaise-amd.hpc2n.umu.se} \\end{itemize} More on https://www.hpc2n.umu.se/documentation/guides/using_kebnekaise \\end{scriptsize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Simple example, serial} \\begin{block}{} \\justify \\begin{footnotesize} Example: Serial job on Kebnekaise, compiler toolchain \u2018foss\u2019 \\end{footnotesize} \\end{block} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{# Project id - change to your own after the course!} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Asking for 1 core} \\ \\texttt{#SBATCH -n 1} \\ \\texttt{# Asking for a walltime of 5 min} \\ \\texttt{#SBATCH \u2013time=00:05:00} \\ \\vspace{3mm} \\texttt{# Purge modules before loading new ones in a script. 
} \\ \\texttt{ml purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{./my_serial_program} \\end{footnotesize} \\end{block} \\begin{block}{} \\justify \\begin{footnotesize} Submit with: \\ \\texttt{sbatch \\(<\\) jobscript \\(>\\) } \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Example, MPI C program} \\begin{block}{} \\begin{footnotesize} \\texttt{#include \\(<\\) stdio.h \\(>\\) } \\ \\texttt{#include \\(<\\) mpi.h \\(>\\) } \\ \\vspace{3mm} \\texttt{int main (int argc, char *argv[]) {} \\ \\vspace{3mm} \\texttt{int myrank, size;} \\ \\vspace{3mm} \\texttt{MPI_Init(\\&argc, \\&argv);} \\ \\texttt{MPI_Comm_rank(MPI_COMM_WORLD, \\&myrank);} \\ \\texttt{MPI_Comm_size(MPI_COMM_WORLD, \\&size);} \\ \\vspace{3mm} \\texttt{printf(\u201cProcessor \\%d of \\%d: Hello World!\\textbackslash n\u201d, myrank, size);} \\ \\vspace{3mm} \\texttt{MPI_Finalize();} \\vspace{3mm} \\texttt{}} \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Simple example, parallel} \\begin{block}{} \\justify \\begin{footnotesize} Example: MPI job on Kebnekaise, compiler toolchain \u2018foss\u2019 \\end{footnotesize} \\end{block} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{#SBATCH -n 14} \\ \\texttt{#SBATCH \u2013time=00:05:00} \\ \\texttt{##SBATCH \u2013exclusive} \\ \\texttt{#SBATCH \u2013reservation=intro-gpu} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{srun ./my_parallel_program} \\end{footnotesize} \\end{block} } \\begin{frame}[fragile]\\frametitle{The Batch System (SLURM)}\\framesubtitle{Simple example, output} \\begin{block}{} \\justify Example: Output from a MPI job on Kebnekaise, run on 14 cores (one NUMA island) \\end{block} \\begin{block}{} \\begin{tiny} \\begin{verbatim} b-an01 [~/slurm]$ cat slurm-15952.out Processor 12 of 14: Hello World! Processor 5 of 14: Hello World! Processor 9 of 14: Hello World! Processor 4 of 14: Hello World! Processor 11 of 14: Hello World! Processor 13 of 14: Hello World! Processor 0 of 14: Hello World! Processor 1 of 14: Hello World! Processor 2 of 14: Hello World! Processor 3 of 14: Hello World! Processor 6 of 14: Hello World! Processor 7 of 14: Hello World! Processor 8 of 14: Hello World! Processor 10 of 14: Hello World! 
\\end{verbatim} \\end{tiny} \\end{block} \\end{frame} \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Starting more than one serial job in the same submit file} \\begin{block}{} \\begin{small} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{#SBATCH -n 5} \\ \\texttt{#SBATCH \u2013time=00:15:00} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{srun -n 1 ./job1.batch \\&} \\ \\texttt{srun -n 1 ./job2.batch \\&} \\ \\texttt{srun -n 1 ./job3.batch \\&} \\ \\texttt{srun -n 1 ./job4.batch \\&} \\ \\texttt{srun -n 1 ./job5.batch } \\ \\texttt{wait} \\ \\end{small} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Multiple Parallel Jobs Sequentially} \\begin{block}{} \\begin{scriptsize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{#SBATCH -c 28} \\ \\texttt{# Remember to ask for enough time for all jobs to complete} \\ \\texttt{#SBATCH \u2013time=02:00:00} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{# Here 14 tasks with 2 cores per task. Output to file.} \\ \\texttt{# Not needed if your job creates output in a file} \\ \\texttt{# I also copy the output somewhere else and then run} \\ \\texttt{# another executable\u2026} \\ \\vspace{3mm} \\texttt{srun -n 14 -c 2 ./a.out > myoutput1 2>\\&1} \\ \\texttt{cp myoutput1 /pfs/nobackup/home/u/username/mydatadir} \\ \\texttt{srun -n 14 -c 2 ./b.out > myoutput2 2>\\&1} \\ \\texttt{cp myoutput2 /pfs/nobackup/home/u/username/mydatadir} \\ \\texttt{srun -n 14 -c 2 ./c.out > myoutput3 2>\\&1} \\ \\texttt{cp myoutput3 /pfs/nobackup/home/u/username/mydatadir} \\ \\end{scriptsize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{Multiple Parallel Jobs Simultaneously} \\[\\begin{footnotesize} Make sure you ask for enough cores that all jobs can run at the same time, and have enough memory. Of course, this will also work for serial jobs - just remove the srun from the command line. \\end{footnotesize}\\] \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Total number of cores the jobs need} \\ \\texttt{#SBATCH -n 56} \\ \\texttt{# Remember to ask for enough time for all of the jobs to} \\ \\texttt{# complete, even the longest} \\ \\texttt{#SBATCH \u2013time=02:00:00} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{ml foss/2021b} \\ \\vspace{3mm} \\texttt{srun -n 14 \u2013cpu_bind=cores ./a.out \\&} \\ \\texttt{srun -n 28 \u2013cpu_bind=cores ./b.out \\&} \\ \\texttt{srun -n 14 \u2013cpu_bind=cores ./c.out \\&} \\ \\texttt{\u2026} \\ \\texttt{wait} \\ \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{GPU Job - V100} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Expected time for job to complete} \\ \\texttt{#SBATCH \u2013time=00:10:00} \\ \\texttt{# Number of GPU cards needed. 
Here asking for 2 V100 cards} \\ \\texttt{#SBATCH \u2013gres=v100:2} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{# Change to modules needed for your program} \\ \\texttt{ml fosscuda/2021b} \\ \\vspace{3mm} \\texttt{./my-cuda-program} \\ \\end{footnotesize} \\end{block} } \\frame{\\frametitle{The Batch System (SLURM)}\\framesubtitle{GPU Job - A100} \\begin{block}{} \\begin{footnotesize} \\texttt{#!/bin/bash} \\ \\texttt{#SBATCH -A hpc2n2023-132} \\ \\texttt{# Expected time for job to complete} \\ \\texttt{#SBATCH \u2013time=00:10:00} \\ \\texttt{# Adding the partition for the A100 GPUs} \\ \\texttt{#SBATCH -p amd_gpu} \\ \\texttt{# Number of GPU cards needed. Here asking for 2 A100 cards} \\ \\texttt{#SBATCH \u2013gres=a100:2} \\ \\vspace{3mm} \\texttt{module purge > /dev/null 2>\\&1} \\ \\texttt{# Change to modules needed for your software - remember to login} \\ \\texttt{# to kebnekaise-amd.hpc2n.umu.se or} \\ \\texttt{# kebnekaise-amd-tl.hpc2n.umu.se login node to see availability} \\ \\texttt{ml CUDA/11.7.0} \\ \\vspace{3mm} \\texttt{./my-cuda-program} \\ \\end{footnotesize} \\end{block} } \\frame{\\frametitle{Important information} \\begin{block}{} \\begin{itemize} \\begin{small} \\item The course project has the following project ID: hpc2n2023-132 \\item In order to use it in a batch job, add this to the batch script: \\begin{itemize} \\begin{small} \\item #SBATCH -A hpc2n2023-132 \\end{small} \\end{itemize} \\item There is a reservation with one A100 GPU node reserved for the course, in order to let us run small GPU examples without having to wait for too long. The reservation also is for one Broadwell CPU node. \\item The reservation is ONLY valid during the course: \\begin{itemize} \\begin{small} \\item intro-gpu \\ (add with #SBATCH \u2013reservation=intro-gpu) \\end{small} \\item To use the reservation with the A100 GPU node, also add \\texttt{#SBATCH -p amd_gpu} and \\texttt{#SBATCH \u2013gres=a100:x (for x=1,2)}. \\end{itemize} \\item We have a storage project linked to the compute project. It is hpc2n2023-132. You find it in /proj/nobackup/hpc2n2023-132. Remember to create your own directory under it. \\end{small} \\end{itemize} \\end{block} } \\frame{\\frametitle{Questions and support} \\begin{block}{} \\textbf{Questions?} Now: Ask me or one of the other support or application experts present. \\vspace{0.5cm} OR \\vspace{0.5cm} \\begin{itemize} \\item Documentation: \\texttt{https://www.hpc2n.umu.se/support} \\item Support questions to: \\texttt{https://supr.naiss.se/support/} or \\texttt{support@hpc2n.umu.se} \\end{itemize} \\end{block} } \\end{document}","title":"Build environment"},{"location":"filesystem/","text":"The File System \u00b6 Objectives Learn about the file system on Kebnekaise Find the project storage for this course and create your own subdirectory Overview \u00b6 Project storage $HOME /scratch Recommended for batch jobs Yes No (size) Yes Backed up No Yes No Accessible by batch system Yes Yes Yes (node only) Performance High High Medium Default readability Group only Owner Owner Permissions management chmod, chgrp, ACL chmod, chgrp, ACL N/A for batch jobs Notes Storage your group get allocated through the storage projects Your home-directory Per node $HOME \u00b6 This is your home-directory (pointed to by the $HOME variable). It has a quota limit of 25GB per default. Your home directory is backed up regularly. Note Since the home directory is quite small, it should not be used for most production jobs. 
These should instead be run from project storage directories. To find the path to your home directory, either run pwd just after logging in, or do the following: b-an01 [ ~/store ] $ cd b-an01 [ ~ ] $ pwd /home/u/username b-an01 [ ~ ] $ Project storage \u00b6 Project storage is where a project\u2019s members have the majority of their storage. It is applied for through SUPR, as a storage project. While storage projects need to be applied for separately, they are usually linked to a compute project. This is where you should keep your data and run your batch jobs from. It offers high performance when accessed from the nodes, making it suitable for data that is accessed from parallel jobs and for which your home directory (usually) has too little space. Project storage is located below /proj/nobackup/ in the directory name selected during the creation of the proposal. Note The project storage is not intended for permanent storage and there is NO BACKUP of /proj/nobackup . Using project storage \u00b6 If you have a storage project, you should use that to run your jobs. You (or your PI) will either choose a directory name when you/they apply for the storage project or get the project id as the default name. The location of the storage project in the file system is /proj/nobackup/NAME-YOU-PICKED Since the storage project is shared between all users of the project, you should go to that directory and create a subdirectory for your things, which you will then be using. For this course the storage is in /proj/nobackup/intro-hpc2n Exercise Go to the course project storage and create a subdirectory for yourself. Now is a good time to prepare the course material and download the exercises. The easiest way to do so is by cloning the whole intro-course repository from GitHub. Exercise Go to the subdirectory you created under /proj/nobackup/intro-hpc2n Clone the repository for the course: git clone https://github.com/hpc2n/intro-course.git You will get a directory called intro-course . Below it you will find a directory called \u201cexercises\u201d where the majority of the exercises for the batch system section is located. (The commands for these exercises are sketched below.) Quota \u00b6 The size of the storage depends on the allocation. There are small, medium, and large storage projects, each with their own requirements. You can read about this on SUPR.
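The exercises above boil down to a handful of commands; a minimal sketch (replace myuser with your own username):

```bash
# Go to the course project storage and create a personal subdirectory
cd /proj/nobackup/intro-hpc2n
mkdir myuser
cd myuser

# Clone the course repository; the exercises are in its "exercises" subdirectory
git clone https://github.com/hpc2n/intro-course.git
cd intro-course/exercises
```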
You should run the batch jobs from your project storage.","title":"The File System"},{"location":"filesystem/#the__file__system","text":"Objectives Learn about the file system on Kebnekaise Find the project storage for this course and create your own subdirectory","title":"The File System"},{"location":"filesystem/#overview","text":"Project storage $HOME /scratch Recommended for batch jobs Yes No (size) Yes Backed up No Yes No Accessible by batch system Yes Yes Yes (node only) Performance High High Medium Default readability Group only Owner Owner Permissions management chmod, chgrp, ACL chmod, chgrp, ACL N/A for batch jobs Notes Storage your group get allocated through the storage projects Your home-directory Per node","title":"Overview"},{"location":"filesystem/#home","text":"This is your home-directory (pointed to by the $HOME variable). It has a quota limit of 25GB per default. Your home directory is backed up regularly. Note Since the home directory is quite small, it should not be used for most production jobs. These should instead be run from project storage directories. To find the path to your home directory, either run pwd just after logging in, or do the following: b-an01 [ ~/store ] $ cd b-an01 [ ~ ] $ pwd /home/u/username b-an01 [ ~ ] $","title":"$HOME"},{"location":"filesystem/#project__storage","text":"Project storage is where a project\u2019s members have the majority of their storage. It is applied for through SUPR, as a storage project. While storage projects needs to be applied for separately, they are usually linked to a compute project. This is where you should keep your data and run your batch jobs from. It offers high performance when accessed from the nodes making it suitable for storage that are to be accessed from parallel jobs, and your home directory (usually) has too little space. Project storage is located below /proj/nobackup/ in the directory name selected during the creation of the proposal. Note The project storage is not intended for permanent storage and there is NO BACKUP of /proj/nobackup .","title":"Project storage"},{"location":"filesystem/#using__project__storage","text":"If you have a storage project, you should use that to run your jobs. You (your PI) will either choose a directory name when you/they apply for the storage project or get the project id as default name. The location of the storage project in the file system is /proj/nobackup/NAME-YOU-PICKED Since the storage project is shared between all users of the project, you should go to that directory and create a subdirectory for your things, which you will then be using.- For this course the storage is in /proj/nobackup/intro-hpc2n Exercise Go to the course project storage and create a subdirectory for yourself. Now is a good time to prepare the course material and download the exercises. The easiest way to do so is by cloning the whole intro-course repository from GitHub. Exercise Go to the subdirectory you created under /proj/nobackup/intro-hpc2n Clone the repository for the course: git clone https://github.com/hpc2n/intro-course.git You will get a directory called intro-course . Below it you will find a directory called \u201cexercises\u201d where the majority of the exercises for the batch system section is located.","title":"Using project storage"},{"location":"filesystem/#quota","text":"The size of the storage depends on the allocation. There are small, medium, and large storage projects, each with their own requirements. You can read about this on SUPR. 
The quota limits are specific for the project as such, there are no user level quotas on that space.","title":"Quota"},{"location":"filesystem/#scratch","text":"Our recommendation is that you use the project storage instead of /scratch when working on Compute nodes or Login nodes. On the computers at HPC2N there is a directory called /scratch . It is a small local area split between the users using the node and it can be used for saving (temporary) files you create or need during your computations. Please do not save files in /scratch you don\u2019t need when not running jobs on the machine, and please make sure your job removes any temporary files it creates. Note When anybody need more space than available on /scratch , we will remove the oldest/largest files without any notices. More information about the file system, as well as archiving and compressing files, at the HPC2N documentation about File Systems . Keypoints When you login to Kebnekaise, you will end up in your home-directory. Your home-directory is in /home/u/username and is pointed to by the environment variable $HOME . Your project storage is located in /proj/nobackup/NAME-YOU-PICKED For this course it is /proj/nobackup/intro-hpc2n . The project storage is NOT backed up. You should run the batch jobs from your project storage.","title":"/scratch"},{"location":"intro/","text":"Introduction to HPC2N, Kebnekaise and HPC \u00b6 Welcome page and syllabus: https://hpc2n.github.io/intro-linux/index.html Also link at the House symbol at the top of the page. HPC2N \u00b6 Note High Performance Computing Center North (HPC2N) is a competence center for Scientific and Parallel Computing part of National Academic Infrastructure for Super\u00adcomputing in Sweden (NAISS) HPC2N provides state-of-the-art resources and expertise: Scalable and parallel HPC Large-scale storage facilities (Project storage (Lustre), SweStore, Tape) Grid and cloud computing (WLCG NT1, Swedish Science Cloud) National Data Science Node in \u201dEpidemiology and Biology of Infections\u201d (DDLS) Software for e-Science applications All levels of user support Primary, advanced, dedicated Application Experts (AEs) Primary objective To raise the national and local level of HPC competence and transfer HPC knowledge and technology to new users in academia and industry. HPC2N partners \u00b6 HPC2N is hosted by: Partners: HPC2N funding and collaborations \u00b6 Funded mainly by Ume\u00e5 University , with contributions from the other HPC2N partners . Involved in several projects and collaborations : HPC2N training and other services \u00b6 User support (primary, advanced, dedicated) Research group meetings @ UmU Also at the partner sites Online \u201cHPC2N fika\u201d User training and education program 0.5 \u2013 5 days; ready-to-run exercises Introduction to Linux, 16 September 2024 Introduction to HPC2N and Kebnekaise, 16 September 2024 Parallel programming and tools (OpenMP, MPI, debugging, perf. 
analyzers, Matlab, R, MD simulation, ML, GPU, \u2026) Basic Singularity, 16 October 2024 Introduction to running R, Python, Julia, and Matlab in HPC, 22-25 October 2024 Introduction to Git, 25-29 November 2024 Using Python in an HPC environment, 5-6 December 2024 Updated list: https://www.hpc2n.umu.se/events/courses Workshops and seminars NGSSC / SeSE & university courses HPC2N personnel \u00b6 Management: Paolo Bientinesi, director Bj\u00f6rn Torkelsson, deputy director Lena Hellman, administrator Application experts: Jerry Eriksson Pedro Ojeda May Birgitte Bryds\u00f6 \u00c5ke Sandgren Others: Mikael R\u00e4nnar (WLCG coord) Research Engineers under DDLS, HPC2N/SciLifeLab Paul Dulaud, System Developer, IT Abdullah Aziz, Data Engineer Data Steward System and support: Erik Andersson Birgitte Bryds\u00f6 Niklas Edmundsson (Tape coord) My Karlsson Roger Oscarsson \u00c5ke Sandgren Mattias Wadenstein (NeIC, Tier1) Lars Viklund HPC2N application experts \u00b6 HPC2N provides advanced and dedicated support in the form of Application Experts (AEs) : Jerry Eriksson: Profiling, Machine learning (DNN), MPI, OpenMP, OpenACC Pedro Ojeda May: Molecular dynamics, Profiling, QM/MM, NAMD, Amber, Gromacs, GAUSSIAN, R, Python \u00c5ke Sandgren: General high level programming assistance, VASP, Gromacs, Amber Birgitte Bryds\u00f6: General HPC, R, Python Contact through regular support HPC2N users by discipline \u00b6 Users from several scientific disciplines: Biosciences and medicine Chemistry Computing science Engineering Materials science Mathematics and statistics Physics including space physics ML, DL, and other AI HPC2N users by discipline, largest users \u00b6 Users from several scientific disciplines: Biosciences and medicine Chemistry Computing science Engineering Materials science Mathematics and statistics Physics including space physics Machine learning and artificial intelligence (several new projects) HPC2N users by software \u00b6 Kebnekaise \u00b6 The current supercomputer at HPC2N. It is a very heterogeneous system. Named after a massif (contains some of Sweden\u2019s highest mountain peaks) Kebnekaise was delivered by Lenovo and installed during the summer 2016 Opened up for general availability on November 7, 2016 In 2018, Kebnekaise was extended with 52 Intel Xeon Gold 6132 (Skylake) nodes, as well as 10 NVidian V100 (Volta) GPU nodes In 2023, Kebnekaise was extended with 2 dual NVIDIA A100 GPU nodes one many-core AMD Zen3 CPU node Kebnekaise will be continuosly upgraded, as old hardware gets retired. In 2024 Kebnekaise was extended with 2 Dual socket GPU-nodes: Lenovo ThinkSystem SR675 V3 2 x AMD EPYC 9454 48C 290W 2.75GHz Processor 768GB [24x 32GB TruDDR5 4800MHz RDIMM-A] 1 x 3.84TB Read Intensive NVMe PCIe 4.0 x4 HS SSD 1 x NVIDIA H100 SXM5 700W 80G HBM3 GPU Board 10 dual-socket GPU-nodes: ThinkSystem SR665 V3 2 x AMD EPYC 9254 24C 200W 2.9GHz Processor 384GB [24x 16GB TruDDR5 4800MHz RDIMM-A] 1 x 1.92TB Read Intensive NVMe PCIe 5.0 x4 HS SSD 2 x NVIDIA L40S 48GB PCIe Gen4 Passive GPU 8 dual-socket CPU only: ThinkSystem SR645 V3 2 x AMD EPYC 9754 128C 360W 2.25GHz Processor 768GB [24x 32GB TruDDR5 4800MHz RDIMM-A] 1 x 1 3.84TB Read Intensive NVMe PCIe 4.0 x4 HS SSD Current hardware in Kebnekaise \u00b6 Kebnekaise have CPU-only, GPU enabled and large memory nodes. 
The CPU-only nodes are: 2 x 14 core Intel broadwell 4460 MB memory / core 48 nodes Total of 41.6 TFlops/s 2 x 14 core Intel skylake 6785 MB memory / core 52 nodes Total of 87 TFlops/s 2 x 64 core AMD zen3 8020 MB / core 1 node Total of 11 TFlops/s 2 x 128 core AMD zen4 2516 MB / core 8 nodes Total of 216 TFlops/s The GPU enabled nodes are: 2 x 14 core Intel broadwell 9000 MB memory / core 2 x Nvidia A40 4 nodes Total of 83 TFlops/s 2 x 14 core Intel skylake 6785 MB memory / core 2 x Nvidia V100 10 nodes Total of 75 TFlops/s 2 x 24 core AMD zen3 10600 MB / core 2 x Nvidia A100 2 nodes 2 x 24 core AMD zen3 10600 MB / core 2 x AMD MI100 1 node 2 x 24 core AMD zen4 6630 MB / core 2 x Nvidia A6000 1 node 2 x 24 core AMD zen4 6630 MB / core 2 x Nvidia L40s 10 nodes 2 x 48 core AMD zen4 6630 MB / core 4 x Nvidia H100 SXM5 2 nodes The large memory nodes are: 4 x 18 core Intel broadwell 41666 MB memory / core 8 nodes Total of 13.6 TFlops/s for all these nodes Kebnekaise - HPC2N storage \u00b6 Basically four types of storage are available at HPC2N: Home directory /home/X/Xyz , $HOME , ~ 25 GB, user owned Project storage /proj/nobackup/abc Shared among project members Local scratch space $SNIC_TMP SSD (170GB), per job, per node, \u201cvolatile\u201d Tape Storage Backup Long term storage Also SweStore \u2014 disk based (dCache) Research Data Storage Infrastructure, for active research data and operated by NAISS, WLCG Kebnekaise - projects \u00b6 Compute projects To use Kebnekaise, you must be a member of a compute project . A compute project has a certain number of core hours allocated for it per month A regular CPU core cost 1 core hour per hour, other resources (e.g., GPUs) cost more Not a hard limit but projects that go over the allocation get lower priority A compute project contains a certain amount of storage. If more storage is required, you must be a member of a storage project . Note As Kebnekaise is a local cluster, you need to be affiliated with UmU, IRF, SLU, Miun, or LTU to use it. Projects are applied for through SUPR ( https://supr.naiss.se ). I will cover more details in a later section, where we go more into detail about HPC2N and Kebnekaise. HPC \u00b6 What is HPC? High Performance Computing (definition) \u201cHigh Performance Computing most generally refers to the practice of aggregating computing power in a way that delivers much higher performance than one could get out of a typical desktop computer or workstation in order to solve large problems in science, engineering, or business.\u201d From: https://insidehpc.com/hpc-basic-training/what-is-hpc/ High Performance Computing - opening the definition \u00b6 Aggregating computing power \u00b6 147 nodes totalling 6808 cores and ??? CUDA cores Compared to 4 cores in a modern laptop Higher performance \u00b6 More than 527,000,000,000,000 arithmetical operations per second (527 trillion (billion) Compared to 200,000,000,000 Flops in a modern laptop (200 billion (milliard) Solve large problems \u00b6 When does a problem become large enough for HPC? Are there other reasons for using HPC resources? (Memory, software, support, etc.) 
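For scale, the comparison under Higher performance above works out to roughly 527x10^12 / 200x10^9, i.e. the cluster delivers on the order of 2,600 times the floating-point throughput of a single modern laptop.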
High Performance Computing - large problems \u00b6 A problem can be large for two main reasons: Execution time : The time required to form a solution to the problem is very long Memory / storage use : The solution of the problem requires a lot of memory and/or storage The former can be remedied by increasing the performance More cores, more nodes, GPUs, \u2026 The latter by adding more memory / storage More memory per node (including large memory nodes), more nodes, \u2026 Kebnekaise: 128GB - 192GB, 384GB, 512GB, 768GB, 3TB Large storage solutions, \u2026 High Performance Computing - what counts as HPC \u00b6 High Performance Computing - other reasons \u00b6 Specialized (expensive) hardware GPUs, including those optimized for AI Kebnekaise has V100, A100, A40, MI100, A6000, L40S, H100 High-end CPUs (AVX-512 etc) and ECC memory Software HPC2N holds licenses for several softwares Software is pre-configured and ready-to-use Support and documentation High Performance Computing - memory models \u00b6 Two memory models are relevant for HPC: Shared memory: Single memory space for all data. Everyone can access the same data Straightforward to use Distributed memory: Multiple distinct memory spaces. Everyone has direct access only to the local data Requires communication High Performance Computing - programming models \u00b6 The programming model changes when we aim for extra performance and/or memory: Single-core: Matlab, Python, C, Fortran, \u2026 Single stream of operations Multi-core: Vectorized Matlab, pthreads, OpenMP Multiple streams of operations Work distribution, coordination (synchronization, etc), \u2026 Distributed memory: MPI, \u2026 Multiple streams of operations Work distribution, coordination (synchronization, etc), \u2026 Data distribution and communication GPUs: CUDA, OpenCL, OpenACC, OpenMP, \u2026 Many lightweight streams of operations Work distribution, coordination (synchronization, etc), \u2026 Data distribution across memory spaces and movement High Performance Computing - software \u00b6 Complexity grows when we aim for extra performance and/or memory/storage: Single-core: LAPACK, \u2026 Load correct toolchain etc Multi-core: LAPACK + parallel BLAS, \u2026 Load correct toolchain etc Allocate correct number of cores, configure software to use correct number of cores, \u2026 Distributed memory}: ScaLAPACK, \u2026 Load correct toolchain etc Allocate correct number of nodes and cores , configure software to use correct number of nodes and cores , \u2026 Data distribution, storage, \u2026 GPUs: MAGMA, TensorFlow, \u2026 Load correct toolchain etc Allocate correct number of cores and GPUs , configure software to use correct number of cores and GPUs , \u2026","title":"Introduction to Kebnekaise and HPC2N"},{"location":"intro/#introduction__to__hpc2n__kebnekaise__and__hpc","text":"Welcome page and syllabus: https://hpc2n.github.io/intro-linux/index.html Also link at the House symbol at the top of the page.","title":"Introduction to HPC2N, Kebnekaise and HPC"},{"location":"intro/#hpc2n","text":"Note High Performance Computing Center North (HPC2N) is a competence center for Scientific and Parallel Computing part of National Academic Infrastructure for Super\u00adcomputing in Sweden (NAISS) HPC2N provides state-of-the-art resources and expertise: Scalable and parallel HPC Large-scale storage facilities (Project storage (Lustre), SweStore, Tape) Grid and cloud computing (WLCG NT1, Swedish Science Cloud) National Data Science Node in \u201dEpidemiology and Biology of Infections\u201d 
(DDLS) Software for e-Science applications All levels of user support Primary, advanced, dedicated Application Experts (AEs) Primary objective To raise the national and local level of HPC competence and transfer HPC knowledge and technology to new users in academia and industry.","title":"HPC2N"},{"location":"intro/#hpc2n__partners","text":"HPC2N is hosted by: Partners:","title":"HPC2N partners"},{"location":"intro/#hpc2n__funding__and__collaborations","text":"Funded mainly by Ume\u00e5 University , with contributions from the other HPC2N partners . Involved in several projects and collaborations :","title":"HPC2N funding and collaborations"},{"location":"intro/#hpc2n__training__and__other__services","text":"User support (primary, advanced, dedicated) Research group meetings @ UmU Also at the partner sites Online \u201cHPC2N fika\u201d User training and education program 0.5 \u2013 5 days; ready-to-run exercises Introduction to Linux, 16 September 2024 Introduction to HPC2N and Kebnekaise, 16 September 2024 Parallel programming and tools (OpenMP, MPI, debugging, perf. analyzers, Matlab, R, MD simulation, ML, GPU, \u2026) Basic Singularity, 16 October 2024 Introduction to running R, Python, Julia, and Matlab in HPC, 22-25 October 2024 Introduction to Git, 25-29 November 2024 Using Python in an HPC environment, 5-6 December 2024 Updated list: https://www.hpc2n.umu.se/events/courses Workshops and seminars NGSSC / SeSE & university courses","title":"HPC2N training and other services"},{"location":"intro/#hpc2n__personnel","text":"Management: Paolo Bientinesi, director Bj\u00f6rn Torkelsson, deputy director Lena Hellman, administrator Application experts: Jerry Eriksson Pedro Ojeda May Birgitte Bryds\u00f6 \u00c5ke Sandgren Others: Mikael R\u00e4nnar (WLCG coord) Research Engineers under DDLS, HPC2N/SciLifeLab Paul Dulaud, System Developer, IT Abdullah Aziz, Data Engineer Data Steward System and support: Erik Andersson Birgitte Bryds\u00f6 Niklas Edmundsson (Tape coord) My Karlsson Roger Oscarsson \u00c5ke Sandgren Mattias Wadenstein (NeIC, Tier1) Lars Viklund","title":"HPC2N personnel"},{"location":"intro/#hpc2n__application__experts","text":"HPC2N provides advanced and dedicated support in the form of Application Experts (AEs) : Jerry Eriksson: Profiling, Machine learning (DNN), MPI, OpenMP, OpenACC Pedro Ojeda May: Molecular dynamics, Profiling, QM/MM, NAMD, Amber, Gromacs, GAUSSIAN, R, Python \u00c5ke Sandgren: General high level programming assistance, VASP, Gromacs, Amber Birgitte Bryds\u00f6: General HPC, R, Python Contact through regular support","title":"HPC2N application experts"},{"location":"intro/#hpc2n__users__by__discipline","text":"Users from several scientific disciplines: Biosciences and medicine Chemistry Computing science Engineering Materials science Mathematics and statistics Physics including space physics ML, DL, and other AI","title":"HPC2N users by discipline"},{"location":"intro/#hpc2n__users__by__discipline__largest__users","text":"Users from several scientific disciplines: Biosciences and medicine Chemistry Computing science Engineering Materials science Mathematics and statistics Physics including space physics Machine learning and artificial intelligence (several new projects)","title":"HPC2N users by discipline, largest users"},{"location":"intro/#hpc2n__users__by__software","text":"","title":"HPC2N users by software"},{"location":"intro/#kebnekaise","text":"The current supercomputer at HPC2N. It is a very heterogeneous system. 
Named after a massif (contains some of Sweden\u2019s highest mountain peaks) Kebnekaise was delivered by Lenovo and installed during the summer 2016 Opened up for general availability on November 7, 2016 In 2018, Kebnekaise was extended with 52 Intel Xeon Gold 6132 (Skylake) nodes, as well as 10 NVidian V100 (Volta) GPU nodes In 2023, Kebnekaise was extended with 2 dual NVIDIA A100 GPU nodes one many-core AMD Zen3 CPU node Kebnekaise will be continuosly upgraded, as old hardware gets retired. In 2024 Kebnekaise was extended with 2 Dual socket GPU-nodes: Lenovo ThinkSystem SR675 V3 2 x AMD EPYC 9454 48C 290W 2.75GHz Processor 768GB [24x 32GB TruDDR5 4800MHz RDIMM-A] 1 x 3.84TB Read Intensive NVMe PCIe 4.0 x4 HS SSD 1 x NVIDIA H100 SXM5 700W 80G HBM3 GPU Board 10 dual-socket GPU-nodes: ThinkSystem SR665 V3 2 x AMD EPYC 9254 24C 200W 2.9GHz Processor 384GB [24x 16GB TruDDR5 4800MHz RDIMM-A] 1 x 1.92TB Read Intensive NVMe PCIe 5.0 x4 HS SSD 2 x NVIDIA L40S 48GB PCIe Gen4 Passive GPU 8 dual-socket CPU only: ThinkSystem SR645 V3 2 x AMD EPYC 9754 128C 360W 2.25GHz Processor 768GB [24x 32GB TruDDR5 4800MHz RDIMM-A] 1 x 1 3.84TB Read Intensive NVMe PCIe 4.0 x4 HS SSD","title":"Kebnekaise"},{"location":"intro/#current__hardware__in__kebnekaise","text":"Kebnekaise have CPU-only, GPU enabled and large memory nodes. The CPU-only nodes are: 2 x 14 core Intel broadwell 4460 MB memory / core 48 nodes Total of 41.6 TFlops/s 2 x 14 core Intel skylake 6785 MB memory / core 52 nodes Total of 87 TFlops/s 2 x 64 core AMD zen3 8020 MB / core 1 node Total of 11 TFlops/s 2 x 128 core AMD zen4 2516 MB / core 8 nodes Total of 216 TFlops/s The GPU enabled nodes are: 2 x 14 core Intel broadwell 9000 MB memory / core 2 x Nvidia A40 4 nodes Total of 83 TFlops/s 2 x 14 core Intel skylake 6785 MB memory / core 2 x Nvidia V100 10 nodes Total of 75 TFlops/s 2 x 24 core AMD zen3 10600 MB / core 2 x Nvidia A100 2 nodes 2 x 24 core AMD zen3 10600 MB / core 2 x AMD MI100 1 node 2 x 24 core AMD zen4 6630 MB / core 2 x Nvidia A6000 1 node 2 x 24 core AMD zen4 6630 MB / core 2 x Nvidia L40s 10 nodes 2 x 48 core AMD zen4 6630 MB / core 4 x Nvidia H100 SXM5 2 nodes The large memory nodes are: 4 x 18 core Intel broadwell 41666 MB memory / core 8 nodes Total of 13.6 TFlops/s for all these nodes","title":"Current hardware in Kebnekaise"},{"location":"intro/#kebnekaise__-__hpc2n__storage","text":"Basically four types of storage are available at HPC2N: Home directory /home/X/Xyz , $HOME , ~ 25 GB, user owned Project storage /proj/nobackup/abc Shared among project members Local scratch space $SNIC_TMP SSD (170GB), per job, per node, \u201cvolatile\u201d Tape Storage Backup Long term storage Also SweStore \u2014 disk based (dCache) Research Data Storage Infrastructure, for active research data and operated by NAISS, WLCG","title":"Kebnekaise - HPC2N storage"},{"location":"intro/#kebnekaise__-__projects","text":"Compute projects To use Kebnekaise, you must be a member of a compute project . A compute project has a certain number of core hours allocated for it per month A regular CPU core cost 1 core hour per hour, other resources (e.g., GPUs) cost more Not a hard limit but projects that go over the allocation get lower priority A compute project contains a certain amount of storage. If more storage is required, you must be a member of a storage project . Note As Kebnekaise is a local cluster, you need to be affiliated with UmU, IRF, SLU, Miun, or LTU to use it. Projects are applied for through SUPR ( https://supr.naiss.se ). 
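As a worked example of the core-hour accounting (the numbers are illustrative): a job that runs for 10 hours on 28 regular CPU cores draws 10 x 28 = 280 core hours from the project's monthly allocation, while a job using GPUs draws proportionally more, since a GPU costs more per hour than a plain CPU core.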
I will cover more details in a later section, where we go more into detail about HPC2N and Kebnekaise.","title":"Kebnekaise - projects"},{"location":"intro/#hpc","text":"What is HPC? High Performance Computing (definition) \u201cHigh Performance Computing most generally refers to the practice of aggregating computing power in a way that delivers much higher performance than one could get out of a typical desktop computer or workstation in order to solve large problems in science, engineering, or business.\u201d From: https://insidehpc.com/hpc-basic-training/what-is-hpc/","title":"HPC"},{"location":"intro/#high__performance__computing__-__opening__the__definition","text":"","title":"High Performance Computing - opening the definition"},{"location":"intro/#aggregating__computing__power","text":"147 nodes totalling 6808 cores and ??? CUDA cores Compared to 4 cores in a modern laptop","title":"Aggregating computing power"},{"location":"intro/#higher__performance","text":"More than 527,000,000,000,000 arithmetical operations per second (527 trillion (billion) Compared to 200,000,000,000 Flops in a modern laptop (200 billion (milliard)","title":"Higher performance"},{"location":"intro/#solve__large__problems","text":"When does a problem become large enough for HPC? Are there other reasons for using HPC resources? (Memory, software, support, etc.)","title":"Solve large problems"},{"location":"intro/#high__performance__computing__-__large__problems","text":"A problem can be large for two main reasons: Execution time : The time required to form a solution to the problem is very long Memory / storage use : The solution of the problem requires a lot of memory and/or storage The former can be remedied by increasing the performance More cores, more nodes, GPUs, \u2026 The latter by adding more memory / storage More memory per node (including large memory nodes), more nodes, \u2026 Kebnekaise: 128GB - 192GB, 384GB, 512GB, 768GB, 3TB Large storage solutions, \u2026","title":"High Performance Computing - large problems"},{"location":"intro/#high__performance__computing__-__what__counts__as__hpc","text":"","title":"High Performance Computing - what counts as HPC"},{"location":"intro/#high__performance__computing__-__other__reasons","text":"Specialized (expensive) hardware GPUs, including those optimized for AI Kebnekaise has V100, A100, A40, MI100, A6000, L40S, H100 High-end CPUs (AVX-512 etc) and ECC memory Software HPC2N holds licenses for several softwares Software is pre-configured and ready-to-use Support and documentation","title":"High Performance Computing - other reasons"},{"location":"intro/#high__performance__computing__-__memory__models","text":"Two memory models are relevant for HPC: Shared memory: Single memory space for all data. Everyone can access the same data Straightforward to use Distributed memory: Multiple distinct memory spaces. 
Everyone has direct access only to the local data Requires communication","title":"High Performance Computing - memory models"},{"location":"intro/#high__performance__computing__-__programming__models","text":"The programming model changes when we aim for extra performance and/or memory: Single-core: Matlab, Python, C, Fortran, \u2026 Single stream of operations Multi-core: Vectorized Matlab, pthreads, OpenMP Multiple streams of operations Work distribution, coordination (synchronization, etc), \u2026 Distributed memory: MPI, \u2026 Multiple streams of operations Work distribution, coordination (synchronization, etc), \u2026 Data distribution and communication GPUs: CUDA, OpenCL, OpenACC, OpenMP, \u2026 Many lightweight streams of operations Work distribution, coordination (synchronization, etc), \u2026 Data distribution across memory spaces and movement","title":"High Performance Computing - programming models"},{"location":"intro/#high__performance__computing__-__software","text":"Complexity grows when we aim for extra performance and/or memory/storage: Single-core: LAPACK, \u2026 Load correct toolchain etc Multi-core: LAPACK + parallel BLAS, \u2026 Load correct toolchain etc Allocate correct number of cores, configure software to use correct number of cores, \u2026 Distributed memory}: ScaLAPACK, \u2026 Load correct toolchain etc Allocate correct number of nodes and cores , configure software to use correct number of nodes and cores , \u2026 Data distribution, storage, \u2026 GPUs: MAGMA, TensorFlow, \u2026 Load correct toolchain etc Allocate correct number of cores and GPUs , configure software to use correct number of cores and GPUs , \u2026","title":"High Performance Computing - software"},{"location":"login/","text":"Logging in \u00b6 When you have your account, you can login to Kebnekaise. This can be done with any number of SSH clients or with ThinLinc (the easiest option if you need a graphical interface). Objectives Login to Kebnekaise, either with ThinLinc or your SSH client of choice. Kebnekaise login servers \u00b6 Note The main login node of Kebnekaise: kebnekaise.hpc2n.umu.se ThinLinc login node: kebnekaise-tl.hpc2n.umu.se ThinLinc through a browser (less features): https://kebnekaise-tl.hpc2n.umu.se:300/ In addition, there is a login node for the AMD-based nodes. We will talk more about this later: kebnekaise-amd.hpc2n.umu.se . For ThinLinc access: kebnekaise-amd-tl.hpc2n.umu.se ThinLinc is recommended for this course ThinLinc: a cross-platform remote desktop server from Cendio AB. Especially useful when you need software with a graphical interface. This is what we recommend you use for this course, unless you have a preferred SSH client. Using ThinLinc \u00b6 Download the client from https://www.cendio.com/thinlinc/download . Install it. Start the client. Enter the name of the server: kebnekaise-tl.hpc2n.umu.se . Enter your username. Go to \u201cOptions\u201d \\(->\\) \u201cSecurity\u201d. Check that authentication method is set to password. Go to \u201cOptions\u201d \\(->\\) \u201cScreen\u201d. Uncheck \u201cFull screen mode\u201d. Enter your HPC2N password. Click \u201cConnect\u201d Click \u201cContinue\u201d when you are being told that the server\u2019s host key is not in the registry. Wait for the ThinLinc desktop to open. Exercise Login to Kebnekaise. If you are using ThinLinc, first install the ThinLinc client. If you are using another SSH client, install it first if you have not already done so. 
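If you are using a plain SSH client instead of ThinLinc, a minimal login from a Linux or macOS terminal looks like this (replace username with your own HPC2N username; the -X flag is optional and only needed if you want X11 forwarding for simple graphical programs): $ ssh username@kebnekaise.hpc2n.umu.se $ ssh -X username@kebnekaise.hpc2n.umu.se On Windows you can use a client such as PuTTY, or the OpenSSH client included in recent versions of Windows, in the same way.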
Change password \u00b6 You get your first, temporary HPC2N password from this page: HPC2N passwords . That page can also be used to reset your HPC2N password if you have forgotten it. Note that you are authenticating through SUPR, using that service\u2019s login credentials! Warning The HPC2N password and the SUPR password are separate! The HPC2N password and your university/department password are also separate! Exercise: Change your password after first login ONLY do this if you have logged in for the first time/is still using the termporary password you got from the HPC2N password reset service! Changing password is done using the passwd command: passwd Use a good password that combines letters of different case. Do not use dictionary words. Avoid using the same password that you also use in other places. It will first ask for your current password. Type in that and press enter. Then type in the new password, enter, and repeat. You have changed the password. File transfers \u00b6 We are not going to transfer any files as part of this course, but you may have to do so as part of your workflow when using Kebnekaise (or another HPC centre) for your research. Linux, OS X \u00b6 scp \u00b6 SCP (Secure CoPy) is a simple way of transferring files between two machines that use the SSH (Secure SHell) protocol. You may use SCP to connect to any system where you have SSH (log-in) access. These examples show how to use scp from the command-line. Graphical programs exists for doing scp transfer. The command-lone scp program should already be installed. Remote to local Transfer a file from Kebnekaise to your local system, while on your local system scp username@kebnekaise.hpc2n.umu.se:file . Local to remote Transfer a local file to Kebnekaise, while on your local system scp file username@kebnekaise.hpc2n.umu.se:file Recursive directory copy from a local system to a remote system The directory sourcedirectory is here copied as a subdirectory to somedir scp -r sourcedirectory/ username@kebnekaise.hpc2n.umu.se:somedir/ sftp \u00b6 SFTP (SSH File Transfer Protocol or sometimes called Secure File Transfer Protocol) is a network protocol that provides file transfer over a reliable data stream. SFTP is a command -line program on most Unix, Linux, and Mac OS X systems. It is also available as a protocol choice in some graphical file transfer programs. Example: From a local system to a remote system enterprise-d [ ~ ] $ sftp user@kebnekaise.hpc2n.umu.se Connecting to kebnekaise.hpc2n.umu.se... user@kebnekaise.hpc2n.umu.se ' s password: sftp> put file.c C/file.c Uploading file.c to /home/u/user/C/file.c file.c 100 % 1 0 .0KB/s 00 :00 sftp> put -P irf.png pic/ Uploading irf.png to /home/u/user/pic/irf.png irf.png 100 % 2100 2 .1KB/s 00 :00 sftp> Windows \u00b6 Here you need to download a client: WinSCP, FileZilla (sftp), PSCP/PSFTP, \u2026 You can transfer with sftp or scp. There is documentation in HPC2N\u2019s documentation pages for Windows file transfers . Editors \u00b6 Since the editors on a Linux system are different to those you may be familiar with from Windows or macOS, here follows a short overview. There are command-line editors and graphical editors. If you are connecting with a regular SSH client, it will be simplest to use a command-line editor. If you are using ThinLinc, you can use command-line editors or graphical editors as you want. Command-line \u00b6 These are all good editors for using on the command line: nano vi , vim emacs They are all installed on Kebnekaise. 
Of these, vi/vim as well as emacs are probably the most powerful, though the latter is better in a GUI environment. The easiest editor to use if you are not familiar with any of them is nano . Nano Starting \u201cnano\u201d: Type nano FILENAME on the command line and press Enter . FILENAME is whatever you want to call your file. If FILENAME is a file that already exists, nano will open the file. If it dows not exist, it will be created. You now get an editor that looks like this: First thing to notice is that many of the commands are listed at the bottom. The ^ before the letter-commands means you should press CTRL and then the letter (while keeping CTRL down). Your prompt is in the editor window itself, and you can just type (or copy and paste) the content you want in your file. When you want to exit (and possibly save), you press CTRL and then x while holding CTRL down (this is written CTRL-x or ^x ). nano will ask you if you want to save the content of the buffer to the file. After that it will exit. There is a manual for nano here . GUI \u00b6 If you are connecting with ThinLinc , you will be presented with a graphical user interface (GUI). From there you can either open a terminal window/shell ( Applications -> System Tools -> MATE Terminal ) or you can choose editors from the menu by going to Applications -> Accessories . This gives several editor options, of which these have a graphical interface: Text Editor (gedit) Pluma - the default editor on the MATE desktop environments (that Thinlinc runs) Atom - not just an editor, but an IDE Emacs (GUI) NEdit \u201cNirvana Text Editor\u201d If you are not familiar with any of these, a good recommendation would be to use Text Editor/gedit . Text Editor/gedit Starting \u201c gedit \u201d: From the menu, choose Applications -> Accessories -> Text Editor . You then get a window that looks like this: You can open files by clicking \u201c Open \u201d in the top menu. Clicking the small file icon with a green plus will create a new document. Save by clicking \u201c Save \u201d in the menu. The menu on the top right (the three horizontal lines) gives you several other options, including \u201c Find \u201d and \u201c Find and Replace \u201d. Keypoints You can login with ThinLinc or another SSH client ThinLinc is easiest if you need a GUI There are several command-line editors: vi/vim, nano, emacs, \u2026 And several GUI editors, which works best when using ThinLinc: gedit, pluma, atom, emacs (gui), nedit, \u2026","title":"Logging in"},{"location":"login/#logging__in","text":"When you have your account, you can login to Kebnekaise. This can be done with any number of SSH clients or with ThinLinc (the easiest option if you need a graphical interface). Objectives Login to Kebnekaise, either with ThinLinc or your SSH client of choice.","title":"Logging in"},{"location":"login/#kebnekaise__login__servers","text":"Note The main login node of Kebnekaise: kebnekaise.hpc2n.umu.se ThinLinc login node: kebnekaise-tl.hpc2n.umu.se ThinLinc through a browser (less features): https://kebnekaise-tl.hpc2n.umu.se:300/ In addition, there is a login node for the AMD-based nodes. We will talk more about this later: kebnekaise-amd.hpc2n.umu.se . For ThinLinc access: kebnekaise-amd-tl.hpc2n.umu.se ThinLinc is recommended for this course ThinLinc: a cross-platform remote desktop server from Cendio AB. Especially useful when you need software with a graphical interface. 
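As a quick check that you can create and edit files (a minimal example using nano as described above; hello.txt is just a placeholder file name): b-an01 [ ~ ] $ nano hello.txt Type a line of text, press CTRL-x , answer y and press Enter to save, then: b-an01 [ ~ ] $ cat hello.txt This prints the contents of the file you just created.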
This is what we recommend you use for this course, unless you have a preferred SSH client.","title":"Kebnekaise login servers"},{"location":"login/#using__thinlinc","text":"Download the client from https://www.cendio.com/thinlinc/download . Install it. Start the client. Enter the name of the server: kebnekaise-tl.hpc2n.umu.se . Enter your username. Go to \u201cOptions\u201d \\(->\\) \u201cSecurity\u201d. Check that authentication method is set to password. Go to \u201cOptions\u201d \\(->\\) \u201cScreen\u201d. Uncheck \u201cFull screen mode\u201d. Enter your HPC2N password. Click \u201cConnect\u201d Click \u201cContinue\u201d when you are being told that the server\u2019s host key is not in the registry. Wait for the ThinLinc desktop to open. Exercise Login to Kebnekaise. If you are using ThinLinc, first install the ThinLinc client. If you are using another SSH client, install it first if you have not already done so.","title":"Using ThinLinc"},{"location":"login/#change__password","text":"You get your first, temporary HPC2N password from this page: HPC2N passwords . That page can also be used to reset your HPC2N password if you have forgotten it. Note that you are authenticating through SUPR, using that service\u2019s login credentials! Warning The HPC2N password and the SUPR password are separate! The HPC2N password and your university/department password are also separate! Exercise: Change your password after first login ONLY do this if you have logged in for the first time/is still using the termporary password you got from the HPC2N password reset service! Changing password is done using the passwd command: passwd Use a good password that combines letters of different case. Do not use dictionary words. Avoid using the same password that you also use in other places. It will first ask for your current password. Type in that and press enter. Then type in the new password, enter, and repeat. You have changed the password.","title":"Change password"},{"location":"login/#file__transfers","text":"We are not going to transfer any files as part of this course, but you may have to do so as part of your workflow when using Kebnekaise (or another HPC centre) for your research.","title":"File transfers"},{"location":"login/#linux__os__x","text":"","title":"Linux, OS X"},{"location":"login/#scp","text":"SCP (Secure CoPy) is a simple way of transferring files between two machines that use the SSH (Secure SHell) protocol. You may use SCP to connect to any system where you have SSH (log-in) access. These examples show how to use scp from the command-line. Graphical programs exists for doing scp transfer. The command-lone scp program should already be installed. Remote to local Transfer a file from Kebnekaise to your local system, while on your local system scp username@kebnekaise.hpc2n.umu.se:file . Local to remote Transfer a local file to Kebnekaise, while on your local system scp file username@kebnekaise.hpc2n.umu.se:file Recursive directory copy from a local system to a remote system The directory sourcedirectory is here copied as a subdirectory to somedir scp -r sourcedirectory/ username@kebnekaise.hpc2n.umu.se:somedir/","title":"scp"},{"location":"login/#sftp","text":"SFTP (SSH File Transfer Protocol or sometimes called Secure File Transfer Protocol) is a network protocol that provides file transfer over a reliable data stream. SFTP is a command -line program on most Unix, Linux, and Mac OS X systems. It is also available as a protocol choice in some graphical file transfer programs. 
Example: From a local system to a remote system enterprise-d [ ~ ] $ sftp user@kebnekaise.hpc2n.umu.se Connecting to kebnekaise.hpc2n.umu.se... user@kebnekaise.hpc2n.umu.se ' s password: sftp> put file.c C/file.c Uploading file.c to /home/u/user/C/file.c file.c 100 % 1 0 .0KB/s 00 :00 sftp> put -P irf.png pic/ Uploading irf.png to /home/u/user/pic/irf.png irf.png 100 % 2100 2 .1KB/s 00 :00 sftp>","title":"sftp"},{"location":"login/#windows","text":"Here you need to download a client: WinSCP, FileZilla (sftp), PSCP/PSFTP, \u2026 You can transfer with sftp or scp. There is documentation in HPC2N\u2019s documentation pages for Windows file transfers .","title":"Windows"},{"location":"login/#editors","text":"Since the editors on a Linux system are different to those you may be familiar with from Windows or macOS, here follows a short overview. There are command-line editors and graphical editors. If you are connecting with a regular SSH client, it will be simplest to use a command-line editor. If you are using ThinLinc, you can use command-line editors or graphical editors as you want.","title":"Editors"},{"location":"login/#command-line","text":"These are all good editors for using on the command line: nano vi , vim emacs They are all installed on Kebnekaise. Of these, vi/vim as well as emacs are probably the most powerful, though the latter is better in a GUI environment. The easiest editor to use if you are not familiar with any of them is nano . Nano Starting \u201cnano\u201d: Type nano FILENAME on the command line and press Enter . FILENAME is whatever you want to call your file. If FILENAME is a file that already exists, nano will open the file. If it dows not exist, it will be created. You now get an editor that looks like this: First thing to notice is that many of the commands are listed at the bottom. The ^ before the letter-commands means you should press CTRL and then the letter (while keeping CTRL down). Your prompt is in the editor window itself, and you can just type (or copy and paste) the content you want in your file. When you want to exit (and possibly save), you press CTRL and then x while holding CTRL down (this is written CTRL-x or ^x ). nano will ask you if you want to save the content of the buffer to the file. After that it will exit. There is a manual for nano here .","title":"Command-line"},{"location":"login/#gui","text":"If you are connecting with ThinLinc , you will be presented with a graphical user interface (GUI). From there you can either open a terminal window/shell ( Applications -> System Tools -> MATE Terminal ) or you can choose editors from the menu by going to Applications -> Accessories . This gives several editor options, of which these have a graphical interface: Text Editor (gedit) Pluma - the default editor on the MATE desktop environments (that Thinlinc runs) Atom - not just an editor, but an IDE Emacs (GUI) NEdit \u201cNirvana Text Editor\u201d If you are not familiar with any of these, a good recommendation would be to use Text Editor/gedit . Text Editor/gedit Starting \u201c gedit \u201d: From the menu, choose Applications -> Accessories -> Text Editor . You then get a window that looks like this: You can open files by clicking \u201c Open \u201d in the top menu. Clicking the small file icon with a green plus will create a new document. Save by clicking \u201c Save \u201d in the menu. The menu on the top right (the three horizontal lines) gives you several other options, including \u201c Find \u201d and \u201c Find and Replace \u201d. 
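If you are working in a ThinLinc session, the graphical editors can also be started from a terminal window. For example (an illustrative command, assuming gedit as above and a placeholder file name): b-an01 [ ~ ] $ gedit hello.txt & The trailing & starts the editor in the background so that the terminal remains usable.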
Keypoints You can login with ThinLinc or another SSH client ThinLinc is easiest if you need a GUI There are several command-line editors: vi/vim, nano, emacs, \u2026 And several GUI editors, which works best when using ThinLinc: gedit, pluma, atom, emacs (gui), nedit, \u2026","title":"GUI"},{"location":"modules/","text":"The Module System (Lmod) \u00b6 Objectives Learn the basics of the module system which is used to access most of the software on Kebnekaise Try some of the most used commands for the module system: find/list software modules load/unload software modules Learn about compiler toolchains Most programs are accessed by first loading them as a \u2018module\u2019. Modules are: used to set up your environment (paths to executables, libraries, etc.) for using a particular (set of) software package(s) a tool to help users manage their Unix/Linux shell environment, allowing groups of related environment-variable settings to be made or removed dynamically allows having multiple versions of a program or package available by just loading the proper module are installed in a hierarchial layout. This means that some modules are only available after loading a specific compiler and/or MPI version. Useful commands (Lmod) \u00b6 See which modules exists: module spider or ml spider See which versions exist of a specific module: module spider MODULE or ml spider MODULE See prerequisites and how to load a specfic version of a module: module spider MODULE/VERSION or ml spider MODULE/VERSION List modules depending only on what is currently loaded: module avail or ml av See which modules are currently loaded: module list or ml Loading a module: module load MODULE or ml MODULE Loading a specific version of a module: module load MODULE/VERSION or ml MODULE/VERSION Unload a module: module unload MODULE or ml -MODULE Get more information about a module: ml show MODULE or module show MODULE Unload all modules except the \u2018sticky\u2019 modules: module purge or ml purge Hint Code-along! Example: checking which versions exist of the module \u2018Python\u2019 b-an01 [ ~ ] $ ml spider Python --------------------------------------------------------------------------------------------------------- Python: --------------------------------------------------------------------------------------------------------- Description: Python is a programming language that lets you work more quickly and integrate your systems more effectively. Versions: Python/2.7.15 Python/2.7.16 Python/2.7.18-bare Python/2.7.18 Python/3.7.2 Python/3.7.4 Python/3.8.2 Python/3.8.6 Python/3.9.5-bare Python/3.9.5 Python/3.9.6-bare Python/3.9.6 Python/3.10.4-bare Python/3.10.4 Python/3.10.8-bare Python/3.10.8 Python/3.11.3 Python/3.11.5 Other possible modules matches: Biopython Boost.Python Brotli-python GitPython IPython Python-bundle-PyPI flatbuffers-python ... --------------------------------------------------------------------------------------------------------- To find other possible module matches execute: $ module -r spider '.*Python.*' --------------------------------------------------------------------------------------------------------- For detailed information about a specific \"Python\" package ( including how to load the modules ) use the module ' s full name. Note that names that have a trailing ( E ) are extensions provided by other modules. 
For example: $ module spider Python/3.11.5 --------------------------------------------------------------------------------------------------------- b-an01 [ ~ ] $ Example: Check how to load a specific Python version (3.11.5 in this example) b-an01 [ ~ ] $ ml spider Python/3.11.5 --------------------------------------------------------------------------------------------------------- Python: Python/3.11.5 --------------------------------------------------------------------------------------------------------- Description: Python is a programming language that lets you work more quickly and integrate your systems more effectively. You will need to load all module ( s ) on any one of the lines below before the \"Python/3.11.5\" module is available to load. GCCcore/13.2.0 This module provides the following extensions: flit_core/3.9.0 ( E ) , packaging/23.2 ( E ) , pip/23.2.1 ( E ) , setuptools-scm/8.0.4 ( E ) , setuptools/68.2.2 ( E ) , tomli/2.0.1 ( E ) , typing_extensions/4.8.0 ( E ) , wheel/0.41.2 ( E ) Help: Description =========== Python is a programming language that lets you work more quickly and integrate your systems more effectively. More information ================ - Homepage: https://python.org/ Included extensions =================== flit_core-3.9.0, packaging-23.2, pip-23.2.1, setuptools-68.2.2, setuptools- scm-8.0.4, tomli-2.0.1, typing_extensions-4.8.0, wheel-0.41.2 b-an01 [ ~ ] $ Example: Load Python/3.11.5 and its prerequisite(s) Here we also show the loaded module before and after the load. For illustration, we use first ml and then module list : b-an01 [ ~ ] $ ml Currently Loaded Modules: 1 ) snicenvironment ( S ) 2 ) systemdefault ( S ) Where: S: Module is Sticky, requires --force to unload or purge b-an01 [ ~ ] $ module load GCCcore/13.2.0 Python/3.11.5 b-an01 [ ~ ] $ module list Currently Loaded Modules: 1 ) snicenvironment ( S ) 4 ) zlib/1.2.13 7 ) ncurses/6.4 10 ) SQLite/3.43.1 13 ) OpenSSL/1.1 2 ) systemdefault ( S ) 5 ) binutils/2.40 8 ) libreadline/8.2 11 ) XZ/5.4.4 14 ) Python/3.11.5 3 ) GCCcore/13.2.0 6 ) bzip2/1.0.8 9 ) Tcl/8.6.13 12 ) libffi/3.4.4 Where: S: Module is Sticky, requires --force to unload or purge b-an01 [ ~ ] $ Example: Unloading the module Python/3.11.5 In this example we unload the module Python/3.11.5 , but not the prerequisite GCCcore/13.2.0 . We also look at the output of module list before and after. b-an01 [ ~ ] $ module list Currently Loaded Modules: 1 ) snicenvironment ( S ) 4 ) zlib/1.2.13 7 ) ncurses/6.4 10 ) SQLite/3.43.1 13 ) OpenSSL/1.1 2 ) systemdefault ( S ) 5 ) binutils/2.40 8 ) libreadline/8.2 11 ) XZ/5.4.4 14 ) Python/3.11.5 3 ) GCCcore/13.2.0 6 ) bzip2/1.0.8 9 ) Tcl/8.6.13 12 ) libffi/3.4.4 Where: S: Module is Sticky, requires --force to unload or purge b-an01 [ ~ ] $ ml unload Python/3.11.5 b-an01 [ ~ ] $ module list Currently Loaded Modules: 1 ) snicenvironment ( S ) 2 ) systemdefault ( S ) 3 ) GCCcore/13.2.0 Where: S: Module is Sticky, requires --force to unload or purge b-an01 [ ~ ] $ As you can see, the prerequisite did not get unloaded. This is on purpose, because you may have other things loaded which uses the prerequisite. Example: unloading every module you have loaded, with module purge except the \u2018sticky\u2019 modules (some needed things for the environment) First we load some modules. Here Python 3.11.5, SciPy-bundle, and prerequisites for them. We also do module list after loading the modules and after using module purge . 
b-an01 [ ~ ] $ ml GCC/13.2.0 b-an01 [ ~ ] $ ml Python/3.11.5 ml SciPy-bundle/2023.11 b-an01 [ ~ ] $ ml list Currently Loaded Modules: 1 ) snicenvironment ( S ) 7 ) bzip2/1.0.8 13 ) libffi/3.4.4 19 ) cffi/1.15.1 2 ) systemdefault ( S ) 8 ) ncurses/6.4 14 ) OpenSSL/1.1 20 ) cryptography/41.0.5 3 ) GCCcore/13.2.0 9 ) libreadline/8.2 15 ) Python/3.11.5 21 ) virtualenv/20.24.6 4 ) zlib/1.2.13 10 ) Tcl/8.6.13 16 ) OpenBLAS/0.3.24 22 ) Python-bundle-PyPI/2023.10 5 ) binutils/2.40 11 ) SQLite/3.43.1 17 ) FlexiBLAS/3.3.1 23 ) pybind11/2.11.1 6 ) GCC/13.2.0 12 ) XZ/5.4.4 18 ) FFTW/3.3.10 24 ) SciPy-bundle/2023.11 Where: S: Module is Sticky, requires --force to unload or purge b-an01 [ ~ ] $ ml purge The following modules were not unloaded: ( Use \"module --force purge\" to unload all ) : 1 ) snicenvironment 2 ) systemdefault b-an01 [ ~ ] $ ml list Currently Loaded Modules: 1 ) snicenvironment ( S ) 2 ) systemdefault ( S ) Where: S: Module is Sticky, requires --force to unload or purge b-an01 [ ~ ] $ Note You can do several module load on the same line. Or you can do them one at a time, as you want. The modules have to be loaded in order! You cannot list the prerequisite after the module that needs it! One advantage to loading modules one at a time is that you can then find compatible modules that depend on that version easily. Example: you have loaded GCC/13.2.0 and Python/3.11.5 . You can now do ml av to see which versions of other modules you want to load, say SciPy-bundle, are compatible. If you know the name of the module you want, you can even start writing module load SciPy-bundle/ and press TAB - the system will then autocomplete to the compatible one(s). Compiler Toolchains \u00b6 Compiler toolchains load bundles of software making up a complete environment for compiling/using a specific prebuilt software. Includes some/all of: compiler suite, MPI, BLAS, LAPACK, ScaLapack, FFTW, CUDA. Some currently available toolchains (check ml av for versions and full, updated list): GCC : GCC only gcccuda : GCC and CUDA foss : GCC, OpenMPI, OpenBLAS/LAPACK, FFTW, ScaLAPACK gompi : GCC, OpenMPI gompic : GCC, OpenMPI, CUDA gomkl : GCC, OpenMPI, MKL iccifort : icc, ifort iccifortcuda : icc, ifort, CUDA iimpi : icc, ifort, IntelMPI iimpic : iccifort, CUDA, impi intel : icc, ifort, IntelMPI, IntelMKL intel-compilers : icc, ifort (classic and oneAPI) intelcuda : intel and CUDA iompi : iccifort and OpenMPI Keypoints The software on Kebnekaise is mostly accessed through the module system. The modules are arranged in a hierarchial layout; many modules have prerequisites that needs to be loaded first. 
Important commands to the module system: Loading: module load MODULE Unloading: module unload MODULE Unload all modules: module purge List all modules in the system: module spider List versions available of a specific module: module spider MODULE Show how to load a specific module and version: module spider MODULE/VERSION List the modules you have currently loaded: module list Compiler toolchains are modules containing compiler suites and various libraries More information There is more information about the module system and how to work with it in HPC2N\u2019s documentation for the modules system .","title":"The Module System"},{"location":"modules/#the__module__system__lmod","text":"Objectives Learn the basics of the module system which is used to access most of the software on Kebnekaise Try some of the most used commands for the module system: find/list software modules load/unload software modules Learn about compiler toolchains Most programs are accessed by first loading them as a \u2018module\u2019. Modules are: used to set up your environment (paths to executables, libraries, etc.) for using a particular (set of) software package(s) a tool to help users manage their Unix/Linux shell environment, allowing groups of related environment-variable settings to be made or removed dynamically allows having multiple versions of a program or package available by just loading the proper module are installed in a hierarchial layout. This means that some modules are only available after loading a specific compiler and/or MPI version.","title":"The Module System (Lmod)"},{"location":"modules/#useful__commands__lmod","text":"See which modules exists: module spider or ml spider See which versions exist of a specific module: module spider MODULE or ml spider MODULE See prerequisites and how to load a specfic version of a module: module spider MODULE/VERSION or ml spider MODULE/VERSION List modules depending only on what is currently loaded: module avail or ml av See which modules are currently loaded: module list or ml Loading a module: module load MODULE or ml MODULE Loading a specific version of a module: module load MODULE/VERSION or ml MODULE/VERSION Unload a module: module unload MODULE or ml -MODULE Get more information about a module: ml show MODULE or module show MODULE Unload all modules except the \u2018sticky\u2019 modules: module purge or ml purge Hint Code-along! Example: checking which versions exist of the module \u2018Python\u2019 b-an01 [ ~ ] $ ml spider Python --------------------------------------------------------------------------------------------------------- Python: --------------------------------------------------------------------------------------------------------- Description: Python is a programming language that lets you work more quickly and integrate your systems more effectively. Versions: Python/2.7.15 Python/2.7.16 Python/2.7.18-bare Python/2.7.18 Python/3.7.2 Python/3.7.4 Python/3.8.2 Python/3.8.6 Python/3.9.5-bare Python/3.9.5 Python/3.9.6-bare Python/3.9.6 Python/3.10.4-bare Python/3.10.4 Python/3.10.8-bare Python/3.10.8 Python/3.11.3 Python/3.11.5 Other possible modules matches: Biopython Boost.Python Brotli-python GitPython IPython Python-bundle-PyPI flatbuffers-python ... 
--------------------------------------------------------------------------------------------------------- To find other possible module matches execute: $ module -r spider '.*Python.*' --------------------------------------------------------------------------------------------------------- For detailed information about a specific \"Python\" package ( including how to load the modules ) use the module ' s full name. Note that names that have a trailing ( E ) are extensions provided by other modules. For example: $ module spider Python/3.11.5 --------------------------------------------------------------------------------------------------------- b-an01 [ ~ ] $ Example: Check how to load a specific Python version (3.11.5 in this example) b-an01 [ ~ ] $ ml spider Python/3.11.5 --------------------------------------------------------------------------------------------------------- Python: Python/3.11.5 --------------------------------------------------------------------------------------------------------- Description: Python is a programming language that lets you work more quickly and integrate your systems more effectively. You will need to load all module ( s ) on any one of the lines below before the \"Python/3.11.5\" module is available to load. GCCcore/13.2.0 This module provides the following extensions: flit_core/3.9.0 ( E ) , packaging/23.2 ( E ) , pip/23.2.1 ( E ) , setuptools-scm/8.0.4 ( E ) , setuptools/68.2.2 ( E ) , tomli/2.0.1 ( E ) , typing_extensions/4.8.0 ( E ) , wheel/0.41.2 ( E ) Help: Description =========== Python is a programming language that lets you work more quickly and integrate your systems more effectively. More information ================ - Homepage: https://python.org/ Included extensions =================== flit_core-3.9.0, packaging-23.2, pip-23.2.1, setuptools-68.2.2, setuptools- scm-8.0.4, tomli-2.0.1, typing_extensions-4.8.0, wheel-0.41.2 b-an01 [ ~ ] $ Example: Load Python/3.11.5 and its prerequisite(s) Here we also show the loaded module before and after the load. For illustration, we use first ml and then module list : b-an01 [ ~ ] $ ml Currently Loaded Modules: 1 ) snicenvironment ( S ) 2 ) systemdefault ( S ) Where: S: Module is Sticky, requires --force to unload or purge b-an01 [ ~ ] $ module load GCCcore/13.2.0 Python/3.11.5 b-an01 [ ~ ] $ module list Currently Loaded Modules: 1 ) snicenvironment ( S ) 4 ) zlib/1.2.13 7 ) ncurses/6.4 10 ) SQLite/3.43.1 13 ) OpenSSL/1.1 2 ) systemdefault ( S ) 5 ) binutils/2.40 8 ) libreadline/8.2 11 ) XZ/5.4.4 14 ) Python/3.11.5 3 ) GCCcore/13.2.0 6 ) bzip2/1.0.8 9 ) Tcl/8.6.13 12 ) libffi/3.4.4 Where: S: Module is Sticky, requires --force to unload or purge b-an01 [ ~ ] $ Example: Unloading the module Python/3.11.5 In this example we unload the module Python/3.11.5 , but not the prerequisite GCCcore/13.2.0 . We also look at the output of module list before and after. 
b-an01 [ ~ ] $ module list Currently Loaded Modules: 1 ) snicenvironment ( S ) 4 ) zlib/1.2.13 7 ) ncurses/6.4 10 ) SQLite/3.43.1 13 ) OpenSSL/1.1 2 ) systemdefault ( S ) 5 ) binutils/2.40 8 ) libreadline/8.2 11 ) XZ/5.4.4 14 ) Python/3.11.5 3 ) GCCcore/13.2.0 6 ) bzip2/1.0.8 9 ) Tcl/8.6.13 12 ) libffi/3.4.4 Where: S: Module is Sticky, requires --force to unload or purge b-an01 [ ~ ] $ ml unload Python/3.11.5 b-an01 [ ~ ] $ module list Currently Loaded Modules: 1 ) snicenvironment ( S ) 2 ) systemdefault ( S ) 3 ) GCCcore/13.2.0 Where: S: Module is Sticky, requires --force to unload or purge b-an01 [ ~ ] $ As you can see, the prerequisite did not get unloaded. This is on purpose, because you may have other things loaded which uses the prerequisite. Example: unloading every module you have loaded, with module purge except the \u2018sticky\u2019 modules (some needed things for the environment) First we load some modules. Here Python 3.11.5, SciPy-bundle, and prerequisites for them. We also do module list after loading the modules and after using module purge . b-an01 [ ~ ] $ ml GCC/13.2.0 b-an01 [ ~ ] $ ml Python/3.11.5 ml SciPy-bundle/2023.11 b-an01 [ ~ ] $ ml list Currently Loaded Modules: 1 ) snicenvironment ( S ) 7 ) bzip2/1.0.8 13 ) libffi/3.4.4 19 ) cffi/1.15.1 2 ) systemdefault ( S ) 8 ) ncurses/6.4 14 ) OpenSSL/1.1 20 ) cryptography/41.0.5 3 ) GCCcore/13.2.0 9 ) libreadline/8.2 15 ) Python/3.11.5 21 ) virtualenv/20.24.6 4 ) zlib/1.2.13 10 ) Tcl/8.6.13 16 ) OpenBLAS/0.3.24 22 ) Python-bundle-PyPI/2023.10 5 ) binutils/2.40 11 ) SQLite/3.43.1 17 ) FlexiBLAS/3.3.1 23 ) pybind11/2.11.1 6 ) GCC/13.2.0 12 ) XZ/5.4.4 18 ) FFTW/3.3.10 24 ) SciPy-bundle/2023.11 Where: S: Module is Sticky, requires --force to unload or purge b-an01 [ ~ ] $ ml purge The following modules were not unloaded: ( Use \"module --force purge\" to unload all ) : 1 ) snicenvironment 2 ) systemdefault b-an01 [ ~ ] $ ml list Currently Loaded Modules: 1 ) snicenvironment ( S ) 2 ) systemdefault ( S ) Where: S: Module is Sticky, requires --force to unload or purge b-an01 [ ~ ] $ Note You can do several module load on the same line. Or you can do them one at a time, as you want. The modules have to be loaded in order! You cannot list the prerequisite after the module that needs it! One advantage to loading modules one at a time is that you can then find compatible modules that depend on that version easily. Example: you have loaded GCC/13.2.0 and Python/3.11.5 . You can now do ml av to see which versions of other modules you want to load, say SciPy-bundle, are compatible. If you know the name of the module you want, you can even start writing module load SciPy-bundle/ and press TAB - the system will then autocomplete to the compatible one(s).","title":"Useful commands (Lmod)"},{"location":"modules/#compiler__toolchains","text":"Compiler toolchains load bundles of software making up a complete environment for compiling/using a specific prebuilt software. Includes some/all of: compiler suite, MPI, BLAS, LAPACK, ScaLapack, FFTW, CUDA. 
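As an illustration of what loading a toolchain gives you (a minimal sketch: check the exact toolchain versions available with ml av first, and hello.c / hello_mpi.c are placeholder source files): b-an01 [ ~ ] $ ml foss/2023b b-an01 [ ~ ] $ gcc -O2 -o hello hello.c b-an01 [ ~ ] $ mpicc -O2 -o hello_mpi hello_mpi.c After the toolchain module is loaded, the compiler ( gcc ), the MPI wrapper ( mpicc ), and the bundled math libraries are all available in your environment without any further setup.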
Some currently available toolchains (check ml av for versions and full, updated list): GCC : GCC only gcccuda : GCC and CUDA foss : GCC, OpenMPI, OpenBLAS/LAPACK, FFTW, ScaLAPACK gompi : GCC, OpenMPI gompic : GCC, OpenMPI, CUDA gomkl : GCC, OpenMPI, MKL iccifort : icc, ifort iccifortcuda : icc, ifort, CUDA iimpi : icc, ifort, IntelMPI iimpic : iccifort, CUDA, impi intel : icc, ifort, IntelMPI, IntelMKL intel-compilers : icc, ifort (classic and oneAPI) intelcuda : intel and CUDA iompi : iccifort and OpenMPI Keypoints The software on Kebnekaise is mostly accessed through the module system. The modules are arranged in a hierarchial layout; many modules have prerequisites that needs to be loaded first. Important commands to the module system: Loading: module load MODULE Unloading: module unload MODULE Unload all modules: module purge List all modules in the system: module spider List versions available of a specific module: module spider MODULE Show how to load a specific module and version: module spider MODULE/VERSION List the modules you have currently loaded: module list Compiler toolchains are modules containing compiler suites and various libraries More information There is more information about the module system and how to work with it in HPC2N\u2019s documentation for the modules system .","title":"Compiler Toolchains"},{"location":"projectsaccounts/","text":"Projects - compute and storage \u00b6 Note In order to have an account at HPC2N, you need to be a member of a compute project. You can either join a project or apply for one yourself (if you fulfill the requirements). There are both storage projects and compute projects. The storage projects are for when the amount of storage included with the compute project is not enough. Kebnekaise is only open for local project requests! The PI must be affiliated with UmU, LTU, IRF, MiUN, or SLU. You can still add members (join) from anywhere. Application process \u00b6 Apply for compute projects in SUPR . Login to SUPR (create SUPR account if you do not have one). Click \u201cRounds\u201d in the left menu. Pick \u201cCompute Rounds\u201d. Pick \u201cCentre Local Compute\u201d. Pick \u201cHPC2N Local Compute YYYY\u201d. Choose \u201cCreate New Proposal for HPC2N Local Compute YYYY\u201d. Create from scratch or use earlier proposal as starting point. Agree to the default storage if 500GB is enough. More information: https://supr.naiss.se/round/open_or_pending_type/?type=Centre+Local+Compute If the above mentioned default storage is not enough, you will need to apply for a Local storage project : https://supr.naiss.se/round/open_or_pending_type/?type=Centre+Local+Storage Info As default, you have 25GB in your home directory. If you need more, you/your PI can accept the \u201cdefault storage\u201d you will be offered after applying for compute resources. The default storage is 500GB. If you need more than that, you/your PI will have to apply for a storage project. When you have both, link them together. It is done from the storage project. This way all members of the compute project also becomes members of the storage project. After applying on SUPR, the project(s) will be reviewed. Linking a compute project to a storage project \u00b6 Before linking (SUPR): 2. Pick a compute project to link: 3. Showing linked projects: 4. Members of the storage project after linking: Accounts \u00b6 When you have a project / have become member of a project, you can apply for an account at HPC2N. 
This is done in SUPR, under \u201cAccounts\u201d: https://supr.naiss.se/account/ . Your account request will be processed within a week. You will then get an email with information about logging in and links to getting started information. More information on the account process can be found on HPC2N\u2019s documentation pages: https://www.hpc2n.umu.se/documentation/access-and-accounts/users","title":"Projects and Accounts"},{"location":"projectsaccounts/#projects__-__compute__and__storage","text":"Note In order to have an account at HPC2N, you need to be a member of a compute project. You can either join a project or apply for one yourself (if you fulfill the requirements). There are both storage projects and compute projects. The storage projects are for when the amount of storage included with the compute project is not enough. Kebnekaise is only open for local project requests! The PI must be affiliated with UmU, LTU, IRF, MiUN, or SLU. You can still add members (join) from anywhere.","title":"Projects - compute and storage"},{"location":"projectsaccounts/#application__process","text":"Apply for compute projects in SUPR . Login to SUPR (create SUPR account if you do not have one). Click \u201cRounds\u201d in the left menu. Pick \u201cCompute Rounds\u201d. Pick \u201cCentre Local Compute\u201d. Pick \u201cHPC2N Local Compute YYYY\u201d. Choose \u201cCreate New Proposal for HPC2N Local Compute YYYY\u201d. Create from scratch or use earlier proposal as starting point. Agree to the default storage if 500GB is enough. More information: https://supr.naiss.se/round/open_or_pending_type/?type=Centre+Local+Compute If the above mentioned default storage is not enough, you will need to apply for a Local storage project : https://supr.naiss.se/round/open_or_pending_type/?type=Centre+Local+Storage Info As default, you have 25GB in your home directory. If you need more, you/your PI can accept the \u201cdefault storage\u201d you will be offered after applying for compute resources. The default storage is 500GB. If you need more than that, you/your PI will have to apply for a storage project. When you have both, link them together. It is done from the storage project. This way all members of the compute project also becomes members of the storage project. After applying on SUPR, the project(s) will be reviewed.","title":"Application process"},{"location":"projectsaccounts/#linking__a__compute__project__to__a__storage__project","text":"Before linking (SUPR): 2. Pick a compute project to link: 3. Showing linked projects: 4. Members of the storage project after linking:","title":"Linking a compute project to a storage project"},{"location":"projectsaccounts/#accounts","text":"When you have a project / have become member of a project, you can apply for an account at HPC2N. This is done in SUPR, under \u201cAccounts\u201d: https://supr.naiss.se/account/ . Your account request will be processed within a week. You will then get an email with information about logging in and links to getting started information. More information on the account process can be found on HPC2N\u2019s documentation pages: https://www.hpc2n.umu.se/documentation/access-and-accounts/users","title":"Accounts"}]} \ No newline at end of file diff --git a/search/worker.js b/search/worker.js new file mode 100644 index 00000000..8628dbce --- /dev/null +++ b/search/worker.js @@ -0,0 +1,133 @@ +var base_path = 'function' === typeof importScripts ? '.' 
: '/search/'; +var allowSearch = false; +var index; +var documents = {}; +var lang = ['en']; +var data; + +function getScript(script, callback) { + console.log('Loading script: ' + script); + $.getScript(base_path + script).done(function () { + callback(); + }).fail(function (jqxhr, settings, exception) { + console.log('Error: ' + exception); + }); +} + +function getScriptsInOrder(scripts, callback) { + if (scripts.length === 0) { + callback(); + return; + } + getScript(scripts[0], function() { + getScriptsInOrder(scripts.slice(1), callback); + }); +} + +function loadScripts(urls, callback) { + if( 'function' === typeof importScripts ) { + importScripts.apply(null, urls); + callback(); + } else { + getScriptsInOrder(urls, callback); + } +} + +function onJSONLoaded () { + data = JSON.parse(this.responseText); + var scriptsToLoad = ['lunr.js']; + if (data.config && data.config.lang && data.config.lang.length) { + lang = data.config.lang; + } + if (lang.length > 1 || lang[0] !== "en") { + scriptsToLoad.push('lunr.stemmer.support.js'); + if (lang.length > 1) { + scriptsToLoad.push('lunr.multi.js'); + } + if (lang.includes("ja") || lang.includes("jp")) { + scriptsToLoad.push('tinyseg.js'); + } + for (var i=0; i < lang.length; i++) { + if (lang[i] != 'en') { + scriptsToLoad.push(['lunr', lang[i], 'js'].join('.')); + } + } + } + loadScripts(scriptsToLoad, onScriptsLoaded); +} + +function onScriptsLoaded () { + console.log('All search scripts loaded, building Lunr index...'); + if (data.config && data.config.separator && data.config.separator.length) { + lunr.tokenizer.separator = new RegExp(data.config.separator); + } + + if (data.index) { + index = lunr.Index.load(data.index); + data.docs.forEach(function (doc) { + documents[doc.location] = doc; + }); + console.log('Lunr pre-built index loaded, search ready'); + } else { + index = lunr(function () { + if (lang.length === 1 && lang[0] !== "en" && lunr[lang[0]]) { + this.use(lunr[lang[0]]); + } else if (lang.length > 1) { + this.use(lunr.multiLanguage.apply(null, lang)); // spread operator not supported in all browsers: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Spread_operator#Browser_compatibility + } + this.field('title'); + this.field('text'); + this.ref('location'); + + for (var i=0; i < data.docs.length; i++) { + var doc = data.docs[i]; + this.add(doc); + documents[doc.location] = doc; + } + }); + console.log('Lunr index built, search ready'); + } + allowSearch = true; + postMessage({config: data.config}); + postMessage({allowSearch: allowSearch}); +} + +function init () { + var oReq = new XMLHttpRequest(); + oReq.addEventListener("load", onJSONLoaded); + var index_path = base_path + '/search_index.json'; + if( 'function' === typeof importScripts ){ + index_path = 'search_index.json'; + } + oReq.open("GET", index_path); + oReq.send(); +} + +function search (query) { + if (!allowSearch) { + console.error('Assets for search still loading'); + return; + } + + var resultDocuments = []; + var results = index.search(query); + for (var i=0; i < results.length; i++){ + var result = results[i]; + doc = documents[result.ref]; + doc.summary = doc.text.substring(0, 200); + resultDocuments.push(doc); + } + return resultDocuments; +} + +if( 'function' === typeof importScripts ) { + onmessage = function (e) { + if (e.data.init) { + init(); + } else if (e.data.query) { + postMessage({ results: search(e.data.query) }); + } else { + console.error("Worker - Unrecognized message: " + e); + } + }; +} diff --git a/sitemap.xml 
b/sitemap.xml new file mode 100644 index 00000000..3252b9b7 --- /dev/null +++ b/sitemap.xml @@ -0,0 +1,43 @@ + + + + https://hpc2n.github.io/intro-course/ + 2024-06-28 + daily + + + https://hpc2n.github.io/intro-course/batch/ + 2024-06-28 + daily + + + https://hpc2n.github.io/intro-course/compilers/ + 2024-06-28 + daily + + + https://hpc2n.github.io/intro-course/filesystem/ + 2024-06-28 + daily + + + https://hpc2n.github.io/intro-course/intro/ + 2024-06-28 + daily + + + https://hpc2n.github.io/intro-course/login/ + 2024-06-28 + daily + + + https://hpc2n.github.io/intro-course/modules/ + 2024-06-28 + daily + + + https://hpc2n.github.io/intro-course/projectsaccounts/ + 2024-06-28 + daily + + \ No newline at end of file diff --git a/sitemap.xml.gz b/sitemap.xml.gz new file mode 100644 index 00000000..ae43f229 Binary files /dev/null and b/sitemap.xml.gz differ