\documentclass[12pt,titlepage]{article}
\usepackage{amsmath}
\usepackage{amsfonts}
\usepackage{amssymb}
\usepackage{amsthm}
\usepackage{mathtools}
\usepackage{graphicx}
\usepackage{color}
% NOTE(review): the ucs/utf8x combination is unmaintained and conflicts with
% several modern packages (hyperref, biblatex); standard utf8 input support
% (native in the LaTeX kernel since 2018) covers everything this document uses.
\usepackage[utf8]{inputenc}
\usepackage{textcomp}% \textdegree and the literal degree sign used in the body
\usepackage{xparse}
\usepackage{hyperref}
%----Macros----------
%
% Unresolved issues:
%
% \righttoleftarrow
% \lefttorightarrow
%
% \color{} with HTML colorspec
% \bgcolor
% \array with options (without options, it's equivalent to the matrix environment)
% Of the standard HTML named colors, white, black, red, green, blue and yellow
% are predefined in the color package. Here are the rest.
% Remaining standard HTML named colors (white, black, red, green, blue and
% yellow are predefined by the color package).
\definecolor{aqua}{rgb}{0, 1.0, 1.0}
% correct HTML spelling; the misspelled "fuschia" is kept below so existing
% documents that use it keep working
\definecolor{fuchsia}{rgb}{1.0, 0, 1.0}
\definecolor{fuschia}{rgb}{1.0, 0, 1.0}
\definecolor{gray}{rgb}{0.502, 0.502, 0.502}
\definecolor{lime}{rgb}{0, 1.0, 0}
\definecolor{maroon}{rgb}{0.502, 0, 0}
\definecolor{navy}{rgb}{0, 0, 0.502}
\definecolor{olive}{rgb}{0.502, 0.502, 0}
\definecolor{purple}{rgb}{0.502, 0, 0.502}
\definecolor{silver}{rgb}{0.753, 0.753, 0.753}
\definecolor{teal}{rgb}{0, 0.502, 0.502}
% Because of conflicts, \space and \mathop are converted to
% \itexspace and \operatorname during preprocessing.
% itex: \space{ht}{dp}{wd}
%
% Height and baseline depth measurements are in units of tenths of an ex while
% the width is measured in tenths of an em.
\makeatletter
% \itexspace{ht}{dp}{wd}: insert an invisible strut of the given size.
% Arguments are integers in tenths of an ex (height, depth) and tenths of
% an em (width), matching itex's \space{ht}{dp}{wd} syntax (see note above).
\newdimen\itex@wd%
\newdimen\itex@dp%
\newdimen\itex@thd%
\def\itexspace#1#2#3{\itex@wd=#3em%
\itex@wd=0.1\itex@wd%
\itex@dp=#2ex%
\itex@dp=0.1\itex@dp%
\itex@thd=#1ex%
\itex@thd=0.1\itex@thd%
% total rule height = requested height + depth
\advance\itex@thd\the\itex@dp%
% a box of the requested width containing only an invisible rule
\makebox[\the\itex@wd]{\rule[-\the\itex@dp]{0cm}{\the\itex@thd}}}
\makeatother
% \tensor and \multiscript
% \tensor and \multiscript: typeset tensor-style runs of mixed sub/superscripts.
\makeatletter
% flag: are there pending superscripts waiting to be emitted?
\newif\if@sup
% token register accumulating the superscripts seen so far
\newtoks\@sups
% append #1 to the pending-superscript token list
\def\append@sup#1{\edef\act{\noexpand\@sups={\the\@sups #1}}\act}%
% clear the pending-superscript flag and token list
\def\reset@sup{\@supfalse\@sups={}}%
% \mk@scripts scans token pairs until the `_/' sentinel: a `_' flushes any
% pending superscripts and emits the following token as a subscript; any
% other token is accumulated as a superscript.  Tail-recursive via
% \expandafter so the \fi is removed before the next call.
\def\mk@scripts#1#2{\if #2/ \if@sup ^{\the\@sups}\fi \else%
\ifx #1_ \if@sup ^{\the\@sups}\reset@sup \fi {}_{#2}%
\else \append@sup#2 \@suptrue \fi%
\expandafter\mk@scripts\fi}
% \tensor{base}{scripts}: scripts follow the base, e.g. \tensor{T}{^a_b^c}
\def\tensor#1#2{\reset@sup#1\mk@scripts#2_/}
% \multiscripts{pre}{base}{post}: scripts before and after the base
\def\multiscripts#1#2#3{\reset@sup{}\mk@scripts#1_/#2%
\reset@sup\mk@scripts#3_/}
\makeatother
% \slash
% \slash{x}: overprint `/' on the argument (e.g. Feynman-slash notation).
% NOTE(review): this \def silently replaces LaTeX's standard breakable
% \slash -- intentional for itex output.
\makeatletter
\newbox\slashbox \setbox\slashbox=\hbox{$/$}
% center the slash over the argument: typeset `/', back up by the average
% of the two widths, then typeset the argument on top
\def\itex@pslash#1{\setbox\@tempboxa=\hbox{$#1$}
\@tempdima=0.5\wd\slashbox \advance\@tempdima 0.5\wd\@tempboxa
\copy\slashbox \kern-\@tempdima \box\@tempboxa}
\def\slash{\protect\itex@pslash}
\makeatother
% math-mode versions of \rlap, etc
% from Alexander Perlis, "A complement to \smash, \llap, and lap"
% http://math.arizona.edu/~aprl/publications/mathclap/
% NOTE(review): mathtools (loaded in the preamble) already provides
% \mathllap, \mathrlap and \mathclap; these \def's silently replace them
% with equivalent implementations.
% \clap: center the argument in a zero-width box (companion to \llap/\rlap)
\def\clap#1{\hbox to 0pt{\hss#1\hss}}
% \mathpalette forwards the current math style to the two-argument internals
\def\mathllap{\mathpalette\mathllapinternal}
\def\mathrlap{\mathpalette\mathrlapinternal}
\def\mathclap{\mathpalette\mathclapinternal}
% \mathsurround=0pt suppresses stray spacing around the inner formula
\def\mathllapinternal#1#2{\llap{$\mathsurround=0pt#1{#2}$}}
\def\mathrlapinternal#1#2{\rlap{$\mathsurround=0pt#1{#2}$}}
\def\mathclapinternal#1#2{\clap{$\mathsurround=0pt#1{#2}$}}
% Save the primitive \root as \oldroot, then redefine \root and \sqrt so
% that \root{#1}{#2} and \sqrt[#1]{#2} both typeset an n-th root (itex
% uses braced arguments instead of LaTeX's \root n \of{x} syntax).
\let\oldroot\root
\def\root#1#2{\oldroot #1 \of{#2}}
\renewcommand{\sqrt}[2][]{\oldroot #1 \of{#2}}
% Manually declare the txfonts symbolsC font
\DeclareSymbolFont{symbolsC}{U}{txsyc}{m}{n}
\SetSymbolFont{symbolsC}{bold}{U}{txsyc}{bx}{n}
% register a substitution so missing txsyc sizes degrade gracefully
\DeclareFontSubstitution{U}{txsyc}{m}{n}
% Manually declare the stmaryrd font
\DeclareSymbolFont{stmry}{U}{stmry}{m}{n}
\SetSymbolFont{stmry}{bold}{U}{stmry}{b}{n}
% Manually declare the MnSymbolE font
\DeclareFontFamily{OMX}{MnSymbolE}{}
\DeclareSymbolFont{mnomx}{OMX}{MnSymbolE}{m}{n}
\SetSymbolFont{mnomx}{bold}{OMX}{MnSymbolE}{b}{n}
% map each text-size range onto the matching MnSymbolE design size
\DeclareFontShape{OMX}{MnSymbolE}{m}{n}{
<-6> MnSymbolE5
<6-7> MnSymbolE6
<7-8> MnSymbolE7
<8-9> MnSymbolE8
<9-10> MnSymbolE9
<10-12> MnSymbolE10
<12-> MnSymbolE12}{}
% Declare specific arrows from txfonts without loading the full package
\makeatletter
% Redeclare a math symbol even if the control sequence already exists:
% \undefined-ing it first silences \DeclareMathSymbol's redefinition check.
\def\re@DeclareMathSymbol#1#2#3#4{%
\let#1=\undefined
\DeclareMathSymbol{#1}{#2}{#3}{#4}}
% diagonal double arrows from txfonts' symbolsC, each under two names
\re@DeclareMathSymbol{\neArrow}{\mathrel}{symbolsC}{116}
\re@DeclareMathSymbol{\neArr}{\mathrel}{symbolsC}{116}
\re@DeclareMathSymbol{\seArrow}{\mathrel}{symbolsC}{117}
\re@DeclareMathSymbol{\seArr}{\mathrel}{symbolsC}{117}
\re@DeclareMathSymbol{\nwArrow}{\mathrel}{symbolsC}{118}
\re@DeclareMathSymbol{\nwArr}{\mathrel}{symbolsC}{118}
\re@DeclareMathSymbol{\swArrow}{\mathrel}{symbolsC}{119}
\re@DeclareMathSymbol{\swArr}{\mathrel}{symbolsC}{119}
\re@DeclareMathSymbol{\nequiv}{\mathrel}{symbolsC}{46}
\re@DeclareMathSymbol{\Perp}{\mathrel}{symbolsC}{121}
\re@DeclareMathSymbol{\Vbar}{\mathrel}{symbolsC}{121}
% symbols from stmaryrd
\re@DeclareMathSymbol{\sslash}{\mathrel}{stmry}{12}
\re@DeclareMathSymbol{\bigsqcap}{\mathop}{stmry}{"64}
% NOTE(review): slot "6 looks odd next to the "64-style slots used nearby --
% confirm against stmaryrd's font table
\re@DeclareMathSymbol{\biginterleave}{\mathop}{stmry}{"6}
% linear-logic "par" (inverted ampersand), under two names
\re@DeclareMathSymbol{\invamp}{\mathrel}{symbolsC}{77}
\re@DeclareMathSymbol{\parr}{\mathrel}{symbolsC}{77}
\makeatother
% \llangle, \rrangle, \lmoustache and \rmoustache from MnSymbolE
\makeatletter
% Declare a MnSymbolE math delimiter, first undefining any existing meaning
% (the kernel already defines \lmoustache/\rmoustache) so that
% \DeclareMathDelimiter does not complain.
\def\Decl@Mn@Delim#1#2#3#4{%
\if\relax\noexpand#1%
\let#1\undefined
\fi
\DeclareMathDelimiter{#1}{#2}{#3}{#4}{#3}{#4}}
% convenience wrappers for opening and closing delimiters
\def\Decl@Mn@Open#1#2#3{\Decl@Mn@Delim{#1}{\mathopen}{#2}{#3}}
\def\Decl@Mn@Close#1#2#3{\Decl@Mn@Delim{#1}{\mathclose}{#2}{#3}}
% double angle brackets and moustaches (octal slots in the mnomx font)
\Decl@Mn@Open{\llangle}{mnomx}{'164}
\Decl@Mn@Close{\rrangle}{mnomx}{'171}
\Decl@Mn@Open{\lmoustache}{mnomx}{'245}
\Decl@Mn@Close{\rmoustache}{mnomx}{'244}
\makeatother
% \widecheck{x}: a wide inverted-hat accent, built by vertically flipping a
% \widehat sized to the argument and overprinting it.
\makeatletter
\DeclareRobustCommand\widecheck[1]{{\mathpalette\@widecheck{#1}}}
\def\@widecheck#1#2{%
% box 0: the argument itself (#1 is the math style from \mathpalette)
\setbox\z@\hbox{\m@th$#1#2$}%
% box 2: a \widehat over invisible rules matching the argument's size
\setbox\tw@\hbox{\m@th$#1%
\widehat{%
\vrule\@width\z@\@height\ht\z@
\vrule\@height\z@\@width\wd\z@}$}%
\dp\tw@-\ht\z@
% vertical offset = (arg height + 2 * hat height) / 3
\@tempdima\ht\z@ \advance\@tempdima2\ht\tw@ \divide\@tempdima\thr@@
% flip the hat upside down about the computed offset
\setbox\tw@\hbox{%
\raise\@tempdima\hbox{\scalebox{1}[-1]{\lower\@tempdima\box
\tw@}}}%
% overprint the flipped hat on the argument
{\ooalign{\box\tw@ \cr \box\z@}}}
\makeatother
% \mathraisebox{voffset}[height][depth]{something}: a math-mode \raisebox
% that forwards the current math style to its contents via \mathpalette.
\makeatletter
\NewDocumentCommand\mathraisebox{moom}{%
\IfNoValueTF{#2}{\def\@temp##1##2{\raisebox{#1}{$\m@th##1##2$}}}{%
\IfNoValueTF{#3}{\def\@temp##1##2{\raisebox{#1}[#2]{$\m@th##1##2$}}%
}{\def\@temp##1##2{\raisebox{#1}[#2][#3]{$\m@th##1##2$}}}}%
\mathpalette\@temp{#4}}
% fixed: this was a second \makeatletter, leaving @ a letter past this point
\makeatother
% \udots: diagonal dots rising from lower left to upper right (taken from
% yhmath); the mirror image of \ddots.
\makeatletter
\def\udots{\mathinner{\mkern2mu\raise\p@\hbox{.}
\mkern2mu\raise4\p@\hbox{.}\mkern1mu
\raise7\p@\vbox{\kern7\p@\hbox{.}}\mkern1mu}}
\makeatother
%% Fix array
% itex's \array without options is equivalent to amsmath's matrix environment
\newcommand{\itexarray}[1]{\begin{matrix}#1\end{matrix}}
%% \itexnum is a noop
\newcommand{\itexnum}[1]{#1}
%% Renaming existing commands
% stack #1 below and #2 above #3
\newcommand{\underoverset}[3]{\underset{#1}{\overset{#2}{#3}}}
\newcommand{\widevec}{\overrightarrow}
% short arrow names used by itex input syntax
\newcommand{\darr}{\downarrow}
\newcommand{\nearr}{\nearrow}
\newcommand{\nwarr}{\nwarrow}
\newcommand{\searr}{\searrow}
\newcommand{\swarr}{\swarrow}
\newcommand{\curvearrowbotright}{\curvearrowright}
\newcommand{\uparr}{\uparrow}
\newcommand{\downuparrow}{\updownarrow}
\newcommand{\duparr}{\updownarrow}
\newcommand{\updarr}{\updownarrow}
% \gt and \lt let itex sources avoid literal < and > (awkward inside XML)
\newcommand{\gt}{>}
\newcommand{\lt}{<}
\newcommand{\map}{\mapsto}
\newcommand{\embedsin}{\hookrightarrow}
% capital Greek letters that coincide with Latin capitals
\newcommand{\Alpha}{A}
\newcommand{\Beta}{B}
\newcommand{\Zeta}{Z}
\newcommand{\Eta}{H}
\newcommand{\Iota}{I}
\newcommand{\Kappa}{K}
\newcommand{\Mu}{M}
\newcommand{\Nu}{N}
\newcommand{\Rho}{P}
\newcommand{\Tau}{T}
\newcommand{\Upsi}{\Upsilon}
\newcommand{\omicron}{o}
% delimiter and big-operator aliases
\newcommand{\lang}{\langle}
\newcommand{\rang}{\rangle}
\newcommand{\Union}{\bigcup}
\newcommand{\Intersection}{\bigcap}
\newcommand{\Oplus}{\bigoplus}
\newcommand{\Otimes}{\bigotimes}
\newcommand{\Wedge}{\bigwedge}
\newcommand{\Vee}{\bigvee}
\newcommand{\coproduct}{\coprod}
\newcommand{\product}{\prod}
\newcommand{\closure}{\overline}
% integral aliases
\newcommand{\integral}{\int}
\newcommand{\doubleintegral}{\iint}
\newcommand{\tripleintegral}{\iiint}
\newcommand{\quadrupleintegral}{\iiiint}
\newcommand{\conint}{\oint}
\newcommand{\contourintegral}{\oint}
% miscellaneous symbol aliases
\newcommand{\infinity}{\infty}
\newcommand{\bottom}{\bot}
\newcommand{\minusb}{\boxminus}
\newcommand{\plusb}{\boxplus}
\newcommand{\timesb}{\boxtimes}
\newcommand{\intersection}{\cap}
\newcommand{\union}{\cup}
\newcommand{\Del}{\nabla}
\newcommand{\odash}{\circleddash}
\newcommand{\negspace}{\!}
\newcommand{\widebar}{\overline}
% itex size commands map onto math styles
\newcommand{\textsize}{\normalsize}
% NOTE(review): this turns the standard TEXT command \scriptsize into a
% math-style switch -- intentional for itex output, but surprising elsewhere
\renewcommand{\scriptsize}{\scriptstyle}
\newcommand{\scriptscriptsize}{\scriptscriptstyle}
\newcommand{\mathfr}{\mathfrak}
% itex interactivity commands degrade to just their displayed content
\newcommand{\statusline}[2]{#2}
\newcommand{\tooltip}[2]{#2}
\newcommand{\toggle}[2]{#2}
% Theorem Environments
% numbered theorem-style environments (italic body)
\theoremstyle{plain}
\newtheorem{theorem}{Theorem}
\newtheorem{lemma}{Lemma}
\newtheorem{prop}{Proposition}
\newtheorem{cor}{Corollary}
% unnumbered variants, u-prefixed
\newtheorem*{utheorem}{Theorem}
\newtheorem*{ulemma}{Lemma}
\newtheorem*{uprop}{Proposition}
\newtheorem*{ucor}{Corollary}
% definition style (upright body)
\theoremstyle{definition}
\newtheorem{defn}{Definition}
\newtheorem{example}{Example}
\newtheorem*{udefn}{Definition}
\newtheorem*{uexample}{Example}
% remark style
\theoremstyle{remark}
\newtheorem{remark}{Remark}
\newtheorem{note}{Note}
\newtheorem*{uremark}{Remark}
\newtheorem*{unote}{Note}
%-------------------------------------------------------------------
\begin{document}
%-------------------------------------------------------------------
\section*{Blog - El Niño project (part 4)}
This is a [[Blog articles in progress|blog article in progress]], written by [[John Baez]]. To see discussions of the article as it was being written, visit the \href{http://forum.azimuthproject.org/discussion/1385/blog-el-nino-project-part-4/?Focus=11364#Comment_11364}{Azimuth Forum}. For the final polished article, go to the \href{http://johncarlosbaez.wordpress.com/2014/07/08/el-nino-project-part-4/}{Azimuth Blog}.
If you want to write your own article, please read the directions on \href{http://www.azimuthproject.org/azimuth/show/How+to#blog}{How to blog}.
As the first big step in our El Ni\~n{}o prediction project, Graham Jones replicated the paper by Ludescher et al.\ that I explained last time. Let's see how this works!
Graham did this using R, a programming language that's good for statistics. But if you prefer another language, go ahead and write software for that\ldots{} and let us know! We can add it to our repository.
Today I'll explain this stuff to people who know their way around computers. But I'm not one of those people! So, next time I'll explain the nitty-gritty details in a way that may be helpful to people more like me.
Say you want to predict El Ni\~n{}os from 1950 to 1980 using Ludescher et al.'s method. To do this, you need daily average surface air temperatures in this grid in the Pacific Ocean:
Each square here is 7.5\textdegree{} $\times$ 7.5\textdegree{}. To compute these temperatures, you have to start with temperatures on a grid with smaller squares that are 2.5\textdegree{} $\times$ 2.5\textdegree{} in size:
$\bullet$ Earth System Research Laboratory, NCEP/NCAR Reanalysis 1 surface air temperature data.
You can get the website to deliver temperatures in a given rectangle in a given time interval. It gives you this data in a format called NetCDF, meaning Network Common Data Form. We'll take a different approach. We'll download the Earth's temperatures from 1948 to 2013, and then extract the data we need using R scripts. That way, when we play other games with temperature data later, we'll already have it.
So, go ahead and download all files from {\colorbox[rgb]{1.00,0.93,1.00}{\tt air\char46sig\char57\char57\char53\char46\char49\char57\char52\char56\char46nc}} to {\colorbox[rgb]{1.00,0.93,1.00}{\tt air\char46sig\char57\char57\char53\char46\char50\char48\char49\char51\char46nc}}. It will take a while\ldots{} but you'll own the world.
There are different ways to do this. If you have R fired up, just cut-and-paste this into the console:
\begin{verbatim}for (year in 1950:1979) {
download.file(url=paste0(
"ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.dailyavgs/surface/air.sig995.",
year, ".nc"),
destfile=paste0("air.sig995.", year, ".nc"), mode="wb")
}\end{verbatim}
Now you have files of daily average temperatures on a 2.5\textdegree{} by 2.5\textdegree{} grid from 1948 to 2013. Make sure all these files are in your working directory for R, and download this R script from GitHub:
$\bullet$
Graham wrote it; I just modified it a bit. You can use this to get the temperatures in any time interval and any rectangle of grid points you want. The details are explained in the script. But the defaults are set to precisely what you need now!
So, just run this. You should get a file called {\colorbox[rgb]{1.00,0.93,1.00}{\tt Pacific\char45\char49\char57\char52\char56\char45\char49\char57\char56\char48\char46txt}}. This has daily average temperatures in the region we care about, from 1948 to 1980. It should start with a really long line listing locations in a 27 $\times$ 69 grid, starting with S024E48 and ending with S050E116. I'll explain this coordinate scheme at the end of this post. Then come hundreds of lines listing temperatures in kelvin at those locations on successive days. The first of these lines should start with Y1948P001, meaning the first day of 1948.
And I know what you're dying to ask: yes, leap days are omitted! This annoys the perfectionist in me\ldots{} but leap years make data analysis more complicated, so Ludescher et al.\ ignore leap days, and so do we.
You'll use this data to predict El Ni\~n{}os, so you also want a file of the Ni\~n{}o 3.4 index. Remember from last time, this says how much hotter the surface of this patch of seawater is than usual for this time of year:
You can download the file from here:
$\bullet$
This is a copy of the monthly Ni\~n{}o 3.4 data from the US National Weather Service, which I discussed last time. It has monthly Ni\~n{}o 3.4 data in the column called {\colorbox[rgb]{1.00,0.93,1.00}{\tt ANOM}}.
Put this file in your working directory.
Now you've got {\colorbox[rgb]{1.00,0.93,1.00}{\tt Pacific\char45\char49\char57\char52\char56\char45\char49\char57\char56\char48\char46txt}} and {\colorbox[rgb]{1.00,0.93,1.00}{\tt nino\char51\char46\char52\char45anoms\char46txt}} in your working directory. Download this R script written by Graham Jones, and run it:
$\bullet$
It takes about 45 minutes on my laptop. It computes the average link strength $S$ that I explained last time. It plots $S$ in red and the Ni\~n{}o 3.4 index in blue, like this:
(Click to enlarge.) The shaded region is where the Ni\~n{}o 3.4 index is below 0.5\textdegree{}C. When the blue curve escapes this region and then stays above 0.5\textdegree{}C for at least 5 months, Ludescher et al.\ say that there's an El Ni\~n{}o.
The horizontal red line shows the threshold $\theta = 2.82$. When $S$ exceeds this, and the Ni\~n{}o 3.4 index is not over 0.5\textdegree{}C, Ludescher et al.\ predict that there will be an El Ni\~n{}o in the next calendar year!
Our graph almost matches the corresponding graph in Ludescher et al.'s paper:
Here the green arrows show their successful predictions, dashed arrows show false alarms, and a little letter n appears next to each El Niño they failed to predict.
The graphs don't match perfectly. For the blue curves, we could be using Ni\~n{}o 3.4 from different sources. Differences in the red curves are more interesting, since that's where all the work is involved, and we're starting with the same data. Beside actual bugs, which are always possible, I can think of various explanations. None of them are extremely interesting, so I'll stick them in the last section!
If you want to get ahold of our output, you can do so here:
$\bullet$ .
This has the average link strength $S$ at 10-day intervals, starting from day 730 and going until day 12040, where day 1 is the first of January 1948.
So, you don't actually have to run all these programs to get our final result. However, these programs will help you tackle some programming challenges which I'll list now!
There are lots of variations on the Ludescher paper which we could explore. Here are a few easy ones to get you started. If you do any of these, or anything else, let me know!
I'll start with a really easy one, and work on up.
Repeat the calculation with temperature data from 1980 to 2013. You'll have to get the relevant temperature data and adjust two lines in {\colorbox[rgb]{1.00,0.93,1.00}{\tt netcdf\char45convertor\char45ludescher\char46R}}:
\begin{verbatim}firstyear <- 1948
lastyear <- 1980\end{verbatim}
should become
\begin{verbatim}firstyear <- 1980
lastyear <- 2013\end{verbatim}
or whatever range of years you want. You'll also have to adjust names of years in {\colorbox[rgb]{1.00,0.93,1.00}{\tt ludescher\char45replication\char46R}}. Search the file for the string {\colorbox[rgb]{1.00,0.93,1.00}{\tt \char49\char57}} and make the necessary changes. Ask me if you get stuck.
Repeat the calculation with temperature data on a 2.5\textdegree{} $\times$ 2.5\textdegree{} grid instead of the coarser 7.5\textdegree{} $\times$ 7.5\textdegree{} grid Ludescher use. You've got the data you need. Right now, the program {\colorbox[rgb]{1.00,0.93,1.00}{\tt ludescher\char45replication\char46R}} averages out the temperatures over little 3 $\times$ 3 squares: it starts with temperatures on a 27 $\times$ 69 grid and averages them out to obtain temperatures on the 9 $\times$ 23 grid shown here:
Here's where that happens:
\begin{verbatim}# the data per day is reduced from e.g. 27x69 to 9x23.
subsample.3x3 <- function(vals) {
stopifnot(dim(vals)[2] %% 3 == 0)
stopifnot(dim(vals)[3] %% 3 == 0)
n.sslats <- dim(vals)[2]/3
n.sslons <- dim(vals)[3]/3
ssvals <- array(0, dim=c(dim(vals)[1], n.sslats, n.sslons))
for (d in 1:dim(vals)[1]) {
for (slat in 1:n.sslats) {
for (slon in 1:n.sslons) {
ssvals[d, slat, slon] <- mean(vals[d, (3*slat-2):(3*slat), (3*slon-2):(3*slon)])
}
}
}
ssvals
}
\end{verbatim}
So, you need to eliminate this and change whatever else needs to be changed. What new value of the threshold $\theta$ looks good for predicting El Ni\~n{}os now? Most importantly:
The calculation may take a lot longer, since you've got 9 times as many grid points and you're calculating correlations between pairs of grid points. So if this is too tough, you can go the other way: use a coarser grid and see how much that degrades your ability to predict El Ni\~n{}os.
Right now we compute the average link strength for all pairs $(i,j)$ where $i$ is a node in the El Ni\~n{}o basin defined by Ludescher et al., and $j$ is a node outside this basin. The basin consists of the red dots here:
What happens if you change the definition of the El Ni\~n{}o basin? For example, can you drop those annoying two red dots that are south of the rest, without messing things up?
To study these questions you need to rewrite {\colorbox[rgb]{1.00,0.93,1.00}{\tt ludescher\char45replication\char46R}} a bit. Here's where Graham defines the El Ni\~n{}o basin:
\begin{verbatim}ludescher.basin <- function() {
lats <- c( 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6)
lons <- c(11,12,13,14,15,16,17,18,19,20,21,22,16,22)
stopifnot(length(lats) == length(lons))
list(lats=lats,lons=lons)
}\end{verbatim}
These are lists of latitude and longitude coordinates: (5,11), (5,12), (5,13), etc. A coordinate like (5,11) means the little circle that's 5 down and 11 across in the grid on the above map. So, that's the leftmost point in Ludescher's El Ni\~n{}o basin. By changing these lists, you can change the definition of the El Ni\~n{}o basin. You'll also have to change these lists if you tackle Challenge 2.
There's a lot more you can do\ldots{} the sky's the limit! In the weeks to come, I'll show you lots of things we've actually done.
Here are two reasons our average link strengths could differ from Ludescher's.
Last time I mentioned that Ludescher et al.\ claim to normalize their time-delayed cross-covariances in a sort of complicated way. I explained why I don't think they could have actually used this method. In {\colorbox[rgb]{1.00,0.93,1.00}{\tt ludescher\char45replication\char46R}}, Graham used the simpler normalization described last time: namely, dividing by
\begin{displaymath}
\sqrt{\langle T_i(t)^2 \rangle - \langle T_i(t) \rangle^2} \; \sqrt{\langle T_j(t-\tau)^2 \rangle - \langle T_j(t-\tau) \rangle^2}
\end{displaymath}
instead of
\begin{displaymath}
\sqrt{ \langle (T_i(t) - \langle T_i(t)\rangle)^2 \rangle} \; \sqrt{ \langle (T_j(t-\tau) - \langle T_j(t-\tau)\rangle)^2 \rangle}
\end{displaymath}
Since we don't really know what Ludescher et al.\ did, they might have done something else.
We might also have used a different `subsampling' procedure. That's a name for how we get from the temperature data on a 27 $\times$ 69 grid to temperatures on a 9 $\times$ 23 grid. While the original data files give temperatures named after grid points, each is really an area-averaged temperature for a 2.5\textdegree{} $\times$ 2.5\textdegree{} square. Is this square centered at the grid point, or does the square have that grid point as its north-west corner, or what? I don't know.
This data is on a grid where the coordinates are the number of steps of 2.5 degrees, counting from 1. So, for latitude, 1 means the North Pole, 73 means the South Pole. For longitude, 1 means the prime meridian, 37 means 90\textdegree{} east, 73 means 180\textdegree{} east, 109 means 270\textdegree{}E or 90\textdegree{}W, and 144 means 2.5\textdegree{} west. It's an annoying system, as far as I'm concerned.
In {\colorbox[rgb]{1.00,0.93,1.00}{\tt ludescher\char45replication\char46R}} we use this range of coordinates:
\begin{verbatim}lat.range <- 24:50
lon.range <- 48:116 \end{verbatim}
That's why your file {\colorbox[rgb]{1.00,0.93,1.00}{\tt Pacific\char45\char49\char57\char52\char56\char45\char49\char57\char56\char48\char46txt}} has locations starting with S024E48 and ending with S050E116. Maybe Ludescher et al.\ used a slightly different range or subsampling procedure!
There are probably lots of other nuances I haven't noticed. Can you think of some?
category: blog, climate
[[!redirects Blog - El Nino project (part 4)]]
\end{document}