 AUTHORS                                   |   2
 CHANGES                                   |   3
 pygments/lexers/_mapping.py               |   1
 pygments/lexers/jvm.py                    |   2
 pygments/lexers/math.py                   |  32
 pygments/lexers/text.py                   |   6
 pygments/lexers/web.py                    | 137
 tests/examplefiles/http_request_example   |   3
 tests/examplefiles/http_response_example  |   6
 tests/examplefiles/livescript-demo.ls     |  41
 tests/examplefiles/test.R                 | 264
 11 files changed, 355 insertions(+), 142 deletions(-)
diff --git a/AUTHORS b/AUTHORS
index 8a662633..b0ecaf43 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -9,6 +9,7 @@ Other contributors, listed alphabetically, are:
* Kumar Appaiah -- Debian control lexer
* Ali Afshar -- image formatter
* Andreas Amann -- AppleScript lexer
+* Jeffrey Arnold -- R/S lexer
* Jeremy Ashkenas -- CoffeeScript lexer
* Jeffrey Arnold -- BUGS lexers
* Stefan Matthias Aust -- Smalltalk lexer
@@ -67,6 +68,7 @@ Other contributors, listed alphabetically, are:
* Stephen McKamey -- Duel/JBST lexer
* Brian McKenna -- F# lexer
* Lukas Meuser -- BBCode formatter, Lua lexer
+* Paul Miller -- LiveScript lexer
* Hong Minhee -- HTTP lexer
* Michael Mior -- Awk lexer
* Paulo Moura -- Logtalk lexer
diff --git a/CHANGES b/CHANGES
index 0bf10329..5fbe80f6 100644
--- a/CHANGES
+++ b/CHANGES
@@ -12,9 +12,12 @@ Version 1.6
* Julia (PR#61)
* Croc (new name for MiniD)
+ * LiveScript (PR#84)
- Fix Template Haskell highlighting (PR#63)
+- Fix some S/R lexer errors (PR#91)
+
Version 1.5
-----------
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index 6f342552..40a788f9 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -139,6 +139,7 @@ LEXERS = {
'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt',), ('text/x-kotlin',)),
'LighttpdConfLexer': ('pygments.lexers.text', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)),
'LiterateHaskellLexer': ('pygments.lexers.functional', 'Literate Haskell', ('lhs', 'literate-haskell'), ('*.lhs',), ('text/x-literate-haskell',)),
+ 'LiveScriptLexer': ('pygments.lexers.web', 'LiveScript', ('livescript',), ('*.ls',), ('text/livescript',)),
'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt',), ('text/x-logtalk',)),
'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py
index b56d4582..64338e46 100644
--- a/pygments/lexers/jvm.py
+++ b/pygments/lexers/jvm.py
@@ -644,7 +644,7 @@ class ClojureLexer(RegexLexer):
(r"\\(.|[a-z]+)", String.Char),
# keywords
- (r':' + valid_name, String.Symbol),
+ (r'::?' + valid_name, String.Symbol),
# special operators
(r'~@|[`\'#^~&]', Operator),
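
The one-character widening of the Clojure keyword rule above lets namespace-qualified keywords (::foo) match as well as plain ones (:foo). A quick check, not part of the commit, assuming a Pygments build that includes this change:

    from pygments.lexers.jvm import ClojureLexer

    # ::bar should now come out as a single String.Symbol token,
    # just like :foo already did
    for tokentype, value in ClojureLexer().get_tokens('(:foo ::bar)'):
        print(tokentype, repr(value))
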
diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py
index a6d7a5c5..ead579e3 100644
--- a/pygments/lexers/math.py
+++ b/pygments/lexers/math.py
@@ -1010,34 +1010,40 @@ class SLexer(RegexLexer):
],
'valid_name': [
(r'[a-zA-Z][0-9a-zA-Z\._]+', Text),
- (r'`.+`', String.Backtick),
+ # can begin with ., but not if that is followed by a digit
+ (r'\.[a-zA-Z_][0-9a-zA-Z\._]+', Text),
],
'punctuation': [
- (r'\[|\]|\[\[|\]\]|\$|\(|\)|@|:::?|;|,', Punctuation),
+ (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
],
'keywords': [
- (r'for(?=\s*\()|while(?=\s*\()|if(?=\s*\()|(?<=\s)else|'
- r'(?<=\s)break(?=;|$)|return(?=\s*\()|function(?=\s*\()',
+ (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
+ r'(?![0-9a-zA-Z\._])',
Keyword.Reserved)
],
'operators': [
- (r'<-|-|==|<=|>=|<|>|&&|&|!=|\|\|?', Operator),
- (r'\*|\+|\^|/|%%|%/%|=', Operator),
- (r'%in%|%*%', Operator)
+ (r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
+ (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator)
],
'builtin_symbols': [
- (r'(NULL|NA|TRUE|FALSE|NaN)\b', Keyword.Constant),
+ (r'(NULL|NA(_(integer|real|complex|character)_)?|'
+ r'Inf|TRUE|FALSE|NaN|\.\.(\.|[0-9]+))'
+ r'(?![0-9a-zA-Z\._])',
+ Keyword.Constant),
(r'(T|F)\b', Keyword.Variable),
],
'numbers': [
- (r'(?<![0-9a-zA-Z\)\}\]`\"])(?=\s*)[-\+]?[0-9]+'
- r'(\.[0-9]*)?(E[0-9][-\+]?(\.[0-9]*)?)?', Number),
- (r'\.[0-9]*(E[0-9][-\+]?(\.[0-9]*)?)?', Number),
+ # hex number
+ (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
+ # decimal number
+ (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+)([eE][+-]?[0-9]+)?[Li]?',
+ Number),
],
'statements': [
include('comments'),
# whitespaces
(r'\s+', Text),
+ (r'`.*?`', String.Backtick),
(r'\'', String, 'string_squote'),
(r'\"', String, 'string_dquote'),
include('builtin_symbols'),
@@ -1060,10 +1066,10 @@ class SLexer(RegexLexer):
# ('\}', Punctuation, '#pop')
#],
'string_squote': [
- (r'[^\']*\'', String, '#pop'),
+ (r'([^\'\\]|\\.)*\'', String, '#pop'),
],
'string_dquote': [
- (r'[^\"]*\"', String, '#pop'),
+ (r'([^"\\]|\\.)*"', String, '#pop'),
],
}
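
A similar spot check for the reworked S/R rules (again not part of the commit, assuming a build that includes it): the new operator, hex-number and string regexes should keep <<-, %in% and 0xABp3 together as single tokens, and an escaped quote should no longer terminate a double-quoted string.

    from pygments.lexers.math import SLexer

    snippet = 'x <<- 0xABp3\nif (x %in% 1:10) print("hit \\"this\\" too")\n'
    for tokentype, value in SLexer().get_tokens(snippet):
        print(tokentype, repr(value))
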
diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py
index 130ddba9..ec69337c 100644
--- a/pygments/lexers/text.py
+++ b/pygments/lexers/text.py
@@ -1643,6 +1643,11 @@ class HttpLexer(RegexLexer):
yield match.start(5), Literal, match.group(5)
yield match.start(6), Text, match.group(6)
+ def continuous_header_callback(self, match):
+ yield match.start(1), Text, match.group(1)
+ yield match.start(2), Literal, match.group(2)
+ yield match.start(3), Text, match.group(3)
+
def content_callback(self, match):
content_type = getattr(self, 'content_type', None)
content = match.group()
@@ -1673,6 +1678,7 @@ class HttpLexer(RegexLexer):
],
'headers': [
(r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback),
+ (r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback),
(r'\r?\n', Text, 'content')
],
'content': [
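
The new continuous_header_callback handles folded headers, i.e. values continued on a following line that starts with whitespace (which previously matched no header rule and came out as error tokens). A minimal check, not part of the commit:

    from pygments.lexers.text import HttpLexer

    request = ('GET / HTTP/1.1\r\n'
               'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2)\r\n'
               ' AppleWebKit/535.7 (KHTML, like Gecko)\r\n'
               '\r\n')

    # the indented continuation line should now be picked up by the new rule
    for tokentype, value in HttpLexer().get_tokens(request):
        print(tokentype, repr(value))
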
diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py
index 6b788e82..f9ed1205 100644
--- a/pygments/lexers/web.py
+++ b/pygments/lexers/web.py
@@ -25,8 +25,9 @@ from pygments.lexers.compiled import ScalaLexer
__all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'JSONLexer', 'CssLexer',
'PhpLexer', 'ActionScriptLexer', 'XsltLexer', 'ActionScript3Lexer',
'MxmlLexer', 'HaxeLexer', 'HamlLexer', 'SassLexer', 'ScssLexer',
- 'ObjectiveJLexer', 'CoffeeScriptLexer', 'DuelLexer', 'ScamlLexer',
- 'JadeLexer', 'XQueryLexer', 'DtdLexer', 'DartLexer']
+ 'ObjectiveJLexer', 'CoffeeScriptLexer', 'LiveScriptLexer',
+ 'DuelLexer', 'ScamlLexer', 'JadeLexer', 'XQueryLexer',
+ 'DtdLexer', 'DartLexer']
class JavascriptLexer(RegexLexer):
@@ -1817,28 +1818,32 @@ class CoffeeScriptLexer(RegexLexer):
# this next expr leads to infinite loops root -> slashstartsregex
#(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|=|'
- r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*`%&\|\^/])=?',
+ (r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
+ r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
+ r'=(?!>)|-(?!>)|[<>+*`%&\|\^/])=?',
Operator, 'slashstartsregex'),
- (r'\([^()]*\)\s*->', Name.Function),
+ (r'(?:\([^()]+\))?\s*[=-]>', Name.Function),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
- (r'(for|in|of|while|break|return|continue|switch|when|then|if|else|'
+ (r'(?<![\.\$])(for|own|in|of|while|until|'
+ r'loop|break|return|continue|'
+ r'switch|when|then|if|unless|else|'
r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
- (r'(true|false|yes|no|on|off|null|NaN|Infinity|undefined)\b',
+ (r'(?<![\.\$])(true|false|yes|no|on|off|null|'
+ r'NaN|Infinity|undefined)\b',
Keyword.Constant),
(r'(Array|Boolean|Date|Error|Function|Math|netscape|'
r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
r'decodeURIComponent|encodeURI|encodeURIComponent|'
r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
Name.Builtin),
- (r'[$a-zA-Z_][a-zA-Z0-9_\.:]*\s*[:=]\s', Name.Variable,
+ (r'[$a-zA-Z_][a-zA-Z0-9_\.:\$]*\s*[:=]\s', Name.Variable,
'slashstartsregex'),
- (r'@[$a-zA-Z_][a-zA-Z0-9_\.:]*\s*[:=]\s', Name.Variable.Instance,
+ (r'@[$a-zA-Z_][a-zA-Z0-9_\.:\$]*\s*[:=]\s', Name.Variable.Instance,
'slashstartsregex'),
(r'@', Name.Other, 'slashstartsregex'),
- (r'@?[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other, 'slashstartsregex'),
+ (r'@?[$a-zA-Z_][a-zA-Z0-9_\$]*', Name.Other, 'slashstartsregex'),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
@@ -1880,6 +1885,118 @@ class CoffeeScriptLexer(RegexLexer):
],
}
+
+class LiveScriptLexer(RegexLexer):
+ """
+ For `LiveScript`_ source code.
+
+ .. _LiveScript: http://gkz.github.com/LiveScript/
+
+ New in Pygments 1.6.
+ """
+
+ name = 'LiveScript'
+ aliases = ['live-script', 'livescript']
+ filenames = ['*.ls']
+ mimetypes = ['text/livescript']
+
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'#.*?\n', Comment.Single),
+ ],
+ 'multilineregex': [
+ include('commentsandwhitespace'),
+ (r'//([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'/', String.Regex),
+ (r'[^/#]+', String.Regex)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'//', String.Regex, ('#pop', 'multilineregex')),
+ (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'', Text, '#pop'),
+ ],
+ 'root': [
+ # this next expr leads to infinite loops root -> slashstartsregex
+ #(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
+ r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
+ (r'\+\+|&&|(?<![\.\$])\b(?:and|x?or|is|isnt|not)\b|\?|:|=|'
+ r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
+ r'~(?!\~?>)|-(?!\-?>)|<(?!\[)|(?<!\])>|'
+ r'[+*`%&\|\^/])=?',
+ Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(?<![\.\$])(for|own|in|of|while|until|loop|break|'
+ r'return|continue|switch|when|then|if|unless|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
+ r'extends|this|class|by|const|var|to|til)\b', Keyword,
+ 'slashstartsregex'),
+ (r'(?<![\.\$])(true|false|yes|no|on|off|'
+ r'null|NaN|Infinity|undefined|void)\b',
+ Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
+ Name.Builtin),
+ (r'[$a-zA-Z_][a-zA-Z0-9_\.\-:\$]*\s*[:=]\s', Name.Variable,
+ 'slashstartsregex'),
+ (r'@[$a-zA-Z_][a-zA-Z0-9_\.\-:\$]*\s*[:=]\s', Name.Variable.Instance,
+ 'slashstartsregex'),
+ (r'@', Name.Other, 'slashstartsregex'),
+ (r'@?[$a-zA-Z_][a-zA-Z0-9_\-]*', Name.Other, 'slashstartsregex'),
+ (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?(?:[a-zA-Z_]+)?', Number.Float),
+ (r'[0-9]+(~[0-9a-z]+)?(?:[a-zA-Z_]+)?', Number.Integer),
+ ('"""', String, 'tdqs'),
+ ("'''", String, 'tsqs'),
+ ('"', String, 'dqs'),
+ ("'", String, 'sqs'),
+ (r'\\[\w$-]+', String),
+ (r'<\[.*\]>', String),
+ ],
+ 'strings': [
+ (r'[^#\\\'"]+', String),
+ # note that all LiveScript strings are multi-line.
+ # hashmarks, quotes and backslashes must be parsed one at a time
+ ],
+ 'interpoling_string' : [
+ (r'}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ (r'\\.|\'', String), # double-quoted strings don't need ' escapes
+ (r'#{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r'#|\\.|"', String), # single-quoted strings don't need " escapes
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ (r'\\.|\'|"', String), # no need to escape quotes in triple-string
+ (r'#{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings'),
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
+ include('strings')
+ ],
+ }
+
+
class DuelLexer(RegexLexer):
"""
Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
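
With the registry entry added to _mapping.py above, the new LiveScript lexer is reachable through the normal Pygments plumbing (e.g. pygmentize -l livescript, or via the *.ls filename pattern). A hypothetical smoke test, not part of the commit, assuming an installation that includes this change:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    code = 'double = (x) -> x * 2\n'
    lexer = get_lexer_by_name('livescript')  # the alias registered above
    print(highlight(code, lexer, TerminalFormatter()))
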
diff --git a/tests/examplefiles/http_request_example b/tests/examplefiles/http_request_example
index 5d2a1d52..675d1691 100644
--- a/tests/examplefiles/http_request_example
+++ b/tests/examplefiles/http_request_example
@@ -3,7 +3,8 @@ Host: pygments.org
Connection: keep-alive
Cache-Control: max-age=0
Origin: http://pygments.org
-User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.63 Safari/535.7
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2)
+ AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.63 Safari/535.7
Content-Type: application/x-www-form-urlencoded
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
Referer: http://pygments.org/
diff --git a/tests/examplefiles/http_response_example b/tests/examplefiles/http_response_example
index bf53d61d..51340ca4 100644
--- a/tests/examplefiles/http_response_example
+++ b/tests/examplefiles/http_response_example
@@ -18,8 +18,10 @@ Expires: Tue, 31 Mar 1981 05:00:00 GMT
Cache-Control: no-cache, no-store, must-revalidate, pre-check=0, post-check=0
X-MID: a55f21733bc52bb11d1fc58f9b51b4974fbb8f83
X-RateLimit-Reset: 1323738416
-Set-Cookie: k=10.34.234.116.1323735104238974; path=/; expires=Tue, 20-Dec-11 00:11:44 GMT; domain=.twitter.com
-Set-Cookie: guest_id=v1%3A13237351042425496; domain=.twitter.com; path=/; expires=Thu, 12-Dec-2013 12:11:44 GMT
+Set-Cookie: k=10.34.234.116.1323735104238974; path=/;
+ expires=Tue, 20-Dec-11 00:11:44 GMT; domain=.twitter.com
+Set-Cookie: guest_id=v1%3A13237351042425496; domain=.twitter.com; path=/;
+ expires=Thu, 12-Dec-2013 12:11:44 GMT
Set-Cookie: _twitter_sess=BAh7CDoPY3JlYXRlZF9hdGwrCPS6wjQ0AToHaWQiJTFiMTlhY2E1ZjczYThk%250ANDUwMWQxNjMwZGU2YTQ1ODBhIgpmbGFzaElDOidBY3Rpb25Db250cm9sbGVy%250AOjpGbGFzaDo6Rmxhc2hIYXNoewAGOgpAdXNlZHsA--6b502f30a083e8a41a64f10930e142ea362b1561; domain=.twitter.com; path=/; HttpOnly
Vary: Accept-Encoding
Server: tfe
diff --git a/tests/examplefiles/livescript-demo.ls b/tests/examplefiles/livescript-demo.ls
new file mode 100644
index 00000000..2ff68c63
--- /dev/null
+++ b/tests/examplefiles/livescript-demo.ls
@@ -0,0 +1,41 @@
+a = -> [1 to 50]
+const b = --> [2 til 5]
+var c = ~~> 10_000_000km * 500ms - 16~ff / 32~lol
+e = (a) -> (b) ~> (c) --> (d, e) ~~> <[list of words]>
+dashes-identifiers = ->
+ a - a b -- c 1-1 1- -1 a- a a -a
+underscores_i$d = ->
+ /regexp1/
+ //regexp2//g
+ 'strings' and "strings" and \strings
+
+[2 til 10]
+ |> map (* 2)
+ |> filter (> 5)
+ |> fold (+)
+
+obj =
+ prop1: 1
+ prop2: 2
+
+class Class extends Anc-est-or
+ (args) ->
+ <- # Comment
+ <~ /* Comment */
+ void undefined yes no on off
+ a.void b.undefined c.off d.if f.no g.not
+ avoid bundefined coff dif fno gnot
+ "inter #{2 + 2} #variable"
+ '''HELLO 'world' '''
+
+copy = (from, to, callback) -->
+ error, data <- read file
+ return callback error if error?
+ error <~ write file, data
+ return callback error if error?
+ callback()
+
+take(n, [x, ...xs]:list) =
+ | n <= 0 => []
+ | empty list => []
+ | otherwise => [x] +++ take n - 1, xs
diff --git a/tests/examplefiles/test.R b/tests/examplefiles/test.R
index c53edd13..54325339 100644
--- a/tests/examplefiles/test.R
+++ b/tests/examplefiles/test.R
@@ -1,119 +1,153 @@
-###################################
-####### emplikH1.test() ##########
-###################################
-
-emplikH1.test <- function(x, d, theta, fun,
- tola = .Machine$double.eps^.25)
-{
-n <- length(x)
-if( n <= 2 ) stop("Need more observations")
-if( length(d) != n ) stop("length of x and d must agree")
-if(any((d!=0)&(d!=1))) stop("d must be 0/1's for censor/not-censor")
-if(!is.numeric(x)) stop("x must be numeric values --- observed times")
-
-#temp<-summary(survfit(Surv(x,d),se.fit=F,type="fleming",conf.type="none"))
-#
-newdata <- Wdataclean2(x,d)
-temp <- DnR(newdata$value, newdata$dd, newdata$weight)
-
-time <- temp$time # only uncensored time? Yes.
-risk <- temp$n.risk
-jump <- (temp$n.event)/risk
-
-funtime <- fun(time)
-funh <- (n/risk) * funtime # that is Zi
-funtimeTjump <- funtime * jump
-
-if(jump[length(jump)] >= 1) funh[length(jump)] <- 0 #for inthaz and weights
-
-inthaz <- function(x, ftj, fh, thet){ sum(ftj/(1 + x * fh)) - thet }
-
-diff <- inthaz(0, funtimeTjump, funh, theta)
-
-if( diff == 0 ) { lam <- 0 } else {
- step <- 0.2/sqrt(n)
- if(abs(diff) > 6*log(n)*step )
- stop("given theta value is too far away from theta0")
-
- mini<-0
- maxi<-0
- if(diff > 0) {
- maxi <- step
- while(inthaz(maxi, funtimeTjump, funh, theta) > 0 && maxi < 50*log(n)*step)
- maxi <- maxi+step
- }
- else {
- mini <- -step
- while(inthaz(mini, funtimeTjump, funh, theta) < 0 && mini > - 50*log(n)*step)
- mini <- mini - step
- }
-
- if(inthaz(mini, funtimeTjump, funh, theta)*inthaz(maxi, funtimeTjump, funh, theta) > 0 )
- stop("given theta is too far away from theta0")
-
- temp2 <- uniroot(inthaz,c(mini,maxi), tol = tola,
- ftj=funtimeTjump, fh=funh, thet=theta)
- lam <- temp2$root
+#!/usr/bin/env Rscript
+### Example R script for syntax highlighting
+
+# This is also a comment
+
+## Valid names
+abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUV0123456789._a <- NULL
+.foo_ <- NULL
+._foo <- NULL
+
+## Invalid names
+0abc <- NULL
+.0abc <- NULL
+abc+cde <- NULL
+
+## Reserved Words
+NA
+NA_integer_
+NA_real_
+NA_character_
+NA_complex_
+NULL
+NaN
+Inf
+## Not reserved
+NULLa <- NULL
+NULL1 <- NULL
+NULL. <- NULL
+NA_foo_ <- NULL
+
+## Numbers
+12345678901
+123456.78901
+123e3
+123E3
+1.23e-3
+1.23e3
+1.23e-3
+## integer constants
+123L
+1.23L
+## imaginary numbers
+123i
+-123i
+123e4i
+123e-4i
+## Hex numbers
+0xabcdefABCDEF01234
+0xabcp123
+0xabcP123
+## Not hex
+0xg
+
+## Special operators %xyz%
+## %xyz%
+1 %% 2
+diag(2) %*% diag(2)
+1 %/% 2
+1 %in% 1:10
+diag(2) %o% diag(2)
+diag(2) %x% diag(2)
+`%foo bar%` <- function(x, y) x + y
+1 %foo bar% 2
+
+## Control Structures (3.2) and Function
+## if, else
+if (TRUE) print("foo") else print("bar")
+## For, in
+for(i in 1:5) {
+ print(i)
}
-
-onepluslamh<- 1 + lam * funh ### this is 1 + lam Zi in Ref.
-
-weights <- jump/onepluslamh #need to change last jump to 1? NO. see above
-
-loglik <- 2*(sum(log(onepluslamh)) - sum((onepluslamh-1)/onepluslamh) )
-#?is that right? YES see (3.2) in Ref. above. This ALR, or Poisson LR.
-
-#last <- length(jump) ## to compute loglik2, we need to drop last jump
-#if (jump[last] == 1) {
-# risk1 <- risk[-last]
-# jump1 <- jump[-last]
-# weights1 <- weights[-last]
-# } else {
-# risk1 <- risk
-# jump1 <- jump
-# weights1 <- weights
-# }
-#loglik2 <- 2*( sum(log(onepluslamh)) +
-# sum( (risk1 -1)*log((1-jump1)/(1- weights1) ) ) )
-##? this likelihood seems have negative values sometimes???
-
-list( logemlik=loglik, ### logemlikv2=loglik2,
- lambda=lam, times=time, wts=weights,
- nits=temp2$nf, message=temp2$message )
+## While, break
+i <- 1
+while (TRUE) {
+ i <- i + 1
+ if (i > 3) break
}
-
-library("graphics")
-
-par(mfrow = c(1, 2))
-# plot histogram
-x <- rnorm(100)
-if (max(x) > 100)
- stop("Quite unexpected.")
-else
- hist(x, plot=TRUE, col="ivory")
-
-# from doc: lowess
-plot(cars, main = "lowess(cars)")
- lines(lowess(cars), col = 2)
- lines(lowess(cars, f=.2), col = 3)
- legend(5, 120, c(paste("f = ", c("2/3", ".2"))), lty = 1, col = 2:3)
-
-# from doc: is.na
-is.na(c(1, NA))
-
-# from doc: Extract
-y <- list(1,2,a=4,5)
-y[c(3,4)] # a list containing elements 3 and 4 of y
-y$a # the element of y named a
-
-# from doc: for
-for(n in c(2,5,10,20,50)) {
- x <- stats::rnorm(n)
- cat(n,":", sum(x2),"\n")
+## Repeat
+repeat {1+1}
+## Switch
+x <- 3
+switch(x, 2+2, mean(1:10), rnorm(5))
+## Function, dot-dot-dot, return
+foo <- function(...) {
+ return(sum(...))
+}
+# Not keywords
+functiona <- 2 + 2
+function. <- 2 + 2
+function1 <- 2 + 2
+
+
+## Grouping Tokens 10.3.7
+## Parentheses
+1 + (2 + 3)
+## brackets
+foo <- function(a) {
+ a + 1
}
-class(fo <- y ~ x1*x2) # "formula"
-
-
-
-
+## Indexing 10.3.8
+## []
+bar <- 1:10
+bar[3]
+## [[]]
+foo <- list(a=1, b=2, c=3)
+foo[["a"]]
+## $
+foo$a
+foo$"a"
+
+## Operators
+2 - 2
+2 + 2
+2 ~ 2
+! TRUE
+?"help"
+1:2
+2 * 2
+2 / 2
+2^2
+2 < 2
+2 > 2
+2 == 2
+2 >= 2
+2 <= 2
+2 != 2
+TRUE & FALSE
+TRUE && FALSE
+TRUE | FALSE
+TRUE || FALSE
+foo <- 2 + 2
+foo = 2 + 2
+2 + 2 -> foo
+foo <<- 2 + 2
+2 + 2 ->> foo
+base:::sum
+base::sum
+
+## Strings
+foo <- "hello, world!"
+foo <- 'hello, world!'
+foo <- "Hello, 'world!"
+foo <- 'Hello, "world!'
+foo <- 'Hello, \'world!\''
+foo <- "Hello, \"world!\""
+foo <- "Hello,
+world!"
+foo <- 'Hello,
+world!'
+
+## Backtick strings
+`foo123 +!"bar'baz` <- 2 + 2