author     hsbt <hsbt@b2dd03c8-39d4-4d8f-98ff-823fe69b080e>  2015-04-12 08:36:37 +0000
committer  hsbt <hsbt@b2dd03c8-39d4-4d8f-98ff-823fe69b080e>  2015-04-12 08:36:37 +0000
commit     2e4f0af00f85ca228bcf5fa919882359411c652a (patch)
tree       6317ef3d0c352a8d7496139f34277e224fe3d4ac
parent     7b14512bee222bdb6392e865b00ef4b4c42b9364 (diff)
* ext/json/*, test/json/*: Reverted r50231 because it does not work in a
cross-compile environment. git-svn-id: svn+ssh://ci.ruby-lang.org/ruby/trunk@50267 b2dd03c8-39d4-4d8f-98ff-823fe69b080e
-rw-r--r--  ChangeLog | 5
-rw-r--r--  defs/default_gems | 1
-rw-r--r--  ext/json/extconf.rb | 3
-rw-r--r--  ext/json/fbuffer/fbuffer.h | 190
-rw-r--r--  ext/json/generator/depend | 21
-rw-r--r--  ext/json/generator/extconf.rb | 4
-rw-r--r--  ext/json/generator/generator.c | 1463
-rw-r--r--  ext/json/generator/generator.h | 167
-rw-r--r--  ext/json/lib/json.rb | 62
-rw-r--r--  ext/json/lib/json/add/bigdecimal.rb | 28
-rw-r--r--  ext/json/lib/json/add/complex.rb | 28
-rw-r--r--  ext/json/lib/json/add/core.rb | 11
-rw-r--r--  ext/json/lib/json/add/date.rb | 34
-rw-r--r--  ext/json/lib/json/add/date_time.rb | 50
-rw-r--r--  ext/json/lib/json/add/exception.rb | 31
-rw-r--r--  ext/json/lib/json/add/ostruct.rb | 31
-rw-r--r--  ext/json/lib/json/add/range.rb | 29
-rw-r--r--  ext/json/lib/json/add/rational.rb | 27
-rw-r--r--  ext/json/lib/json/add/regexp.rb | 30
-rw-r--r--  ext/json/lib/json/add/struct.rb | 30
-rw-r--r--  ext/json/lib/json/add/symbol.rb | 25
-rw-r--r--  ext/json/lib/json/add/time.rb | 38
-rw-r--r--  ext/json/lib/json/common.rb | 484
-rw-r--r--  ext/json/lib/json/ext.rb | 21
-rw-r--r--  ext/json/lib/json/generic_object.rb | 62
-rw-r--r--  ext/json/lib/json/version.rb | 8
-rw-r--r--  ext/json/parser/depend | 20
-rw-r--r--  ext/json/parser/extconf.rb | 3
-rw-r--r--  ext/json/parser/parser.c | 2222
-rw-r--r--  ext/json/parser/parser.h | 93
-rw-r--r--  ext/json/parser/parser.rl | 945
-rw-r--r--  ext/json/parser/prereq.mk | 10
-rw-r--r--  gems/bundled_gems | 1
-rw-r--r--  lib/rdoc/rubygems_hook.rb | 8
-rw-r--r--  lib/rdoc/test_case.rb | 6
-rw-r--r--  lib/rubygems/test_case.rb | 11
-rw-r--r--  test/json/fixtures/fail1.json | 1
-rw-r--r--  test/json/fixtures/fail10.json | 1
-rw-r--r--  test/json/fixtures/fail11.json | 1
-rw-r--r--  test/json/fixtures/fail12.json | 1
-rw-r--r--  test/json/fixtures/fail13.json | 1
-rw-r--r--  test/json/fixtures/fail14.json | 1
-rw-r--r--  test/json/fixtures/fail18.json | 1
-rw-r--r--  test/json/fixtures/fail19.json | 1
-rw-r--r--  test/json/fixtures/fail2.json | 1
-rw-r--r--  test/json/fixtures/fail20.json | 1
-rw-r--r--  test/json/fixtures/fail21.json | 1
-rw-r--r--  test/json/fixtures/fail22.json | 1
-rw-r--r--  test/json/fixtures/fail23.json | 1
-rw-r--r--  test/json/fixtures/fail24.json | 1
-rw-r--r--  test/json/fixtures/fail25.json | 1
-rw-r--r--  test/json/fixtures/fail27.json | 2
-rw-r--r--  test/json/fixtures/fail28.json | 2
-rw-r--r--  test/json/fixtures/fail3.json | 1
-rw-r--r--  test/json/fixtures/fail4.json | 1
-rw-r--r--  test/json/fixtures/fail5.json | 1
-rw-r--r--  test/json/fixtures/fail6.json | 1
-rw-r--r--  test/json/fixtures/fail7.json | 1
-rw-r--r--  test/json/fixtures/fail8.json | 1
-rw-r--r--  test/json/fixtures/fail9.json | 1
-rw-r--r--  test/json/fixtures/pass1.json | 56
-rw-r--r--  test/json/fixtures/pass15.json | 1
-rw-r--r--  test/json/fixtures/pass16.json | 1
-rw-r--r--  test/json/fixtures/pass17.json | 1
-rw-r--r--  test/json/fixtures/pass2.json | 1
-rw-r--r--  test/json/fixtures/pass26.json | 1
-rw-r--r--  test/json/fixtures/pass3.json | 6
-rw-r--r--  test/json/setup_variant.rb | 11
-rw-r--r--  test/json/test_json.rb | 553
-rw-r--r--  test/json/test_json_addition.rb | 196
-rw-r--r--  test/json/test_json_encoding.rb | 65
-rw-r--r--  test/json/test_json_fixtures.rb | 35
-rw-r--r--  test/json/test_json_generate.rb | 347
-rw-r--r--  test/json/test_json_generic_object.rb | 75
-rw-r--r--  test/json/test_json_string_matching.rb | 39
-rw-r--r--  test/json/test_json_unicode.rb | 72
-rw-r--r--  test/rdoc/test_rdoc_generator_darkfish.rb | 3
-rw-r--r--  test/rdoc/test_rdoc_generator_json_index.rb | 3
-rw-r--r--  test/rdoc/test_rdoc_rubygems_hook.rb | 5
-rw-r--r--  test/rdoc/test_rdoc_servlet.rb | 3
-rw-r--r--  test/ruby/test_extlibs.rb | 1
-rw-r--r--  test/rubygems/test_gem_commands_install_command.rb | 2
-rw-r--r--  test/rubygems/test_gem_commands_update_command.rb | 3
83 files changed, 7679 insertions, 29 deletions
diff --git a/ChangeLog b/ChangeLog
index 86cc74fcd6..ff87efd128 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,8 @@
+Sun Apr 12 17:35:17 2015 SHIBATA Hiroshi <shibata.hiroshi@gmail.com>
+
+ * ext/json/*, test/json/*: Reverted r50231 because it does not work in a
+ cross-compile environment.
+
Sun Apr 12 15:34:59 2015 Nobuyoshi Nakada <nobu@ruby-lang.org>
* parse.y (arg): fix segfault by null caused by syntax error.
diff --git a/defs/default_gems b/defs/default_gems
index 639392d012..bb6995c85a 100644
--- a/defs/default_gems
+++ b/defs/default_gems
@@ -1,2 +1,3 @@
# gem base directory versioning file [executable files under bin]
rdoc lib/rdoc lib/rdoc.rb [rdoc ri]
+json ext/json ext/json/lib/json/version.rb
diff --git a/ext/json/extconf.rb b/ext/json/extconf.rb
new file mode 100644
index 0000000000..850798c643
--- /dev/null
+++ b/ext/json/extconf.rb
@@ -0,0 +1,3 @@
+require 'mkmf'
+create_makefile('json')
+
diff --git a/ext/json/fbuffer/fbuffer.h b/ext/json/fbuffer/fbuffer.h
new file mode 100644
index 0000000000..5a0a27cda5
--- /dev/null
+++ b/ext/json/fbuffer/fbuffer.h
@@ -0,0 +1,190 @@
+
+#ifndef _FBUFFER_H_
+#define _FBUFFER_H_
+
+#include "ruby.h"
+
+#ifndef RHASH_SIZE
+#define RHASH_SIZE(hsh) (RHASH(hsh)->tbl->num_entries)
+#endif
+
+#ifndef RFLOAT_VALUE
+#define RFLOAT_VALUE(val) (RFLOAT(val)->value)
+#endif
+
+#ifndef RARRAY_PTR
+#define RARRAY_PTR(ARRAY) RARRAY(ARRAY)->ptr
+#endif
+#ifndef RARRAY_LEN
+#define RARRAY_LEN(ARRAY) RARRAY(ARRAY)->len
+#endif
+#ifndef RSTRING_PTR
+#define RSTRING_PTR(string) RSTRING(string)->ptr
+#endif
+#ifndef RSTRING_LEN
+#define RSTRING_LEN(string) RSTRING(string)->len
+#endif
+
+#ifdef PRIsVALUE
+# define RB_OBJ_CLASSNAME(obj) rb_obj_class(obj)
+# define RB_OBJ_STRING(obj) (obj)
+#else
+# define PRIsVALUE "s"
+# define RB_OBJ_CLASSNAME(obj) rb_obj_classname(obj)
+# define RB_OBJ_STRING(obj) StringValueCStr(obj)
+#endif
+
+#ifdef HAVE_RUBY_ENCODING_H
+#include "ruby/encoding.h"
+#define FORCE_UTF8(obj) rb_enc_associate((obj), rb_utf8_encoding())
+#else
+#define FORCE_UTF8(obj)
+#endif
+
+/* We don't need to guard objects for rbx, so let's do nothing at all. */
+#ifndef RB_GC_GUARD
+#define RB_GC_GUARD(object)
+#endif
+
+typedef struct FBufferStruct {
+ unsigned long initial_length;
+ char *ptr;
+ unsigned long len;
+ unsigned long capa;
+} FBuffer;
+
+#define FBUFFER_INITIAL_LENGTH_DEFAULT 1024
+
+#define FBUFFER_PTR(fb) (fb->ptr)
+#define FBUFFER_LEN(fb) (fb->len)
+#define FBUFFER_CAPA(fb) (fb->capa)
+#define FBUFFER_PAIR(fb) FBUFFER_PTR(fb), FBUFFER_LEN(fb)
+
+static FBuffer *fbuffer_alloc(unsigned long initial_length);
+static void fbuffer_free(FBuffer *fb);
+static void fbuffer_clear(FBuffer *fb);
+static void fbuffer_append(FBuffer *fb, const char *newstr, unsigned long len);
+#ifdef JSON_GENERATOR
+static void fbuffer_append_long(FBuffer *fb, long number);
+#endif
+static void fbuffer_append_char(FBuffer *fb, char newchr);
+#ifdef JSON_GENERATOR
+static FBuffer *fbuffer_dup(FBuffer *fb);
+static VALUE fbuffer_to_s(FBuffer *fb);
+#endif
+
+static FBuffer *fbuffer_alloc(unsigned long initial_length)
+{
+ FBuffer *fb;
+ if (initial_length <= 0) initial_length = FBUFFER_INITIAL_LENGTH_DEFAULT;
+ fb = ALLOC(FBuffer);
+ memset((void *) fb, 0, sizeof(FBuffer));
+ fb->initial_length = initial_length;
+ return fb;
+}
+
+static void fbuffer_free(FBuffer *fb)
+{
+ if (fb->ptr) ruby_xfree(fb->ptr);
+ ruby_xfree(fb);
+}
+
+static void fbuffer_clear(FBuffer *fb)
+{
+ fb->len = 0;
+}
+
+static void fbuffer_inc_capa(FBuffer *fb, unsigned long requested)
+{
+ unsigned long required;
+
+ if (!fb->ptr) {
+ fb->ptr = ALLOC_N(char, fb->initial_length);
+ fb->capa = fb->initial_length;
+ }
+
+ for (required = fb->capa; requested > required - fb->len; required <<= 1);
+
+ if (required > fb->capa) {
+ REALLOC_N(fb->ptr, char, required);
+ fb->capa = required;
+ }
+}
+
+static void fbuffer_append(FBuffer *fb, const char *newstr, unsigned long len)
+{
+ if (len > 0) {
+ fbuffer_inc_capa(fb, len);
+ MEMCPY(fb->ptr + fb->len, newstr, char, len);
+ fb->len += len;
+ }
+}
+
+#ifdef JSON_GENERATOR
+static void fbuffer_append_str(FBuffer *fb, VALUE str)
+{
+ const char *newstr = StringValuePtr(str);
+ unsigned long len = RSTRING_LEN(str);
+
+ RB_GC_GUARD(str);
+
+ fbuffer_append(fb, newstr, len);
+}
+#endif
+
+static void fbuffer_append_char(FBuffer *fb, char newchr)
+{
+ fbuffer_inc_capa(fb, 1);
+ *(fb->ptr + fb->len) = newchr;
+ fb->len++;
+}
+
+#ifdef JSON_GENERATOR
+static void freverse(char *start, char *end)
+{
+ char c;
+
+ while (end > start) {
+ c = *end, *end-- = *start, *start++ = c;
+ }
+}
+
+static long fltoa(long number, char *buf)
+{
+ static char digits[] = "0123456789";
+ long sign = number;
+ char* tmp = buf;
+
+ if (sign < 0) number = -number;
+ do *tmp++ = digits[number % 10]; while (number /= 10);
+ if (sign < 0) *tmp++ = '-';
+ freverse(buf, tmp - 1);
+ return tmp - buf;
+}
+
+static void fbuffer_append_long(FBuffer *fb, long number)
+{
+ char buf[20];
+ unsigned long len = fltoa(number, buf);
+ fbuffer_append(fb, buf, len);
+}
+
+static FBuffer *fbuffer_dup(FBuffer *fb)
+{
+ unsigned long len = fb->len;
+ FBuffer *result;
+
+ result = fbuffer_alloc(len);
+ fbuffer_append(result, FBUFFER_PAIR(fb));
+ return result;
+}
+
+static VALUE fbuffer_to_s(FBuffer *fb)
+{
+ VALUE result = rb_str_new(FBUFFER_PTR(fb), FBUFFER_LEN(fb));
+ fbuffer_free(fb);
+ FORCE_UTF8(result);
+ return result;
+}
+#endif
+#endif
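The append path above grows the buffer geometrically: fbuffer_inc_capa doubles the capacity until the requested bytes fit. As a worked example (editor's illustration, not part of the diff): with the default initial capacity of 1024 bytes and 900 bytes already used, appending 300 more bytes doubles the capacity once, from 1024 to 2048, since 300 > 1024 - 900 but 300 <= 2048 - 900.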
diff --git a/ext/json/generator/depend b/ext/json/generator/depend
new file mode 100644
index 0000000000..b7373cde1f
--- /dev/null
+++ b/ext/json/generator/depend
@@ -0,0 +1,21 @@
+$(OBJS): $(ruby_headers)
+generator.o: generator.c generator.h $(srcdir)/../fbuffer/fbuffer.h
+
+# AUTOGENERATED DEPENDENCIES START
+generator.o: $(RUBY_EXTCONF_H)
+generator.o: $(arch_hdrdir)/ruby/config.h
+generator.o: $(hdrdir)/ruby/defines.h
+generator.o: $(hdrdir)/ruby/encoding.h
+generator.o: $(hdrdir)/ruby/intern.h
+generator.o: $(hdrdir)/ruby/missing.h
+generator.o: $(hdrdir)/ruby/oniguruma.h
+generator.o: $(hdrdir)/ruby/re.h
+generator.o: $(hdrdir)/ruby/regex.h
+generator.o: $(hdrdir)/ruby/ruby.h
+generator.o: $(hdrdir)/ruby/st.h
+generator.o: $(hdrdir)/ruby/subst.h
+generator.o: $(top_srcdir)/ext/json/fbuffer/fbuffer.h
+generator.o: $(top_srcdir)/include/ruby.h
+generator.o: generator.c
+generator.o: generator.h
+# AUTOGENERATED DEPENDENCIES END
diff --git a/ext/json/generator/extconf.rb b/ext/json/generator/extconf.rb
new file mode 100644
index 0000000000..8627c5f4bd
--- /dev/null
+++ b/ext/json/generator/extconf.rb
@@ -0,0 +1,4 @@
+require 'mkmf'
+
+$defs << "-DJSON_GENERATOR"
+create_makefile 'json/ext/generator'
diff --git a/ext/json/generator/generator.c b/ext/json/generator/generator.c
new file mode 100644
index 0000000000..baf5360bb1
--- /dev/null
+++ b/ext/json/generator/generator.c
@@ -0,0 +1,1463 @@
+#include "../fbuffer/fbuffer.h"
+#include "generator.h"
+
+#ifdef HAVE_RUBY_ENCODING_H
+static VALUE CEncoding_UTF_8;
+static ID i_encoding, i_encode;
+#endif
+
+static VALUE mJSON, mExt, mGenerator, cState, mGeneratorMethods, mObject,
+ mHash, mArray, mFixnum, mBignum, mFloat, mString, mString_Extend,
+ mTrueClass, mFalseClass, mNilClass, eGeneratorError,
+ eNestingError, CRegexp_MULTILINE, CJSON_SAFE_STATE_PROTOTYPE,
+ i_SAFE_STATE_PROTOTYPE;
+
+static ID i_to_s, i_to_json, i_new, i_indent, i_space, i_space_before,
+ i_object_nl, i_array_nl, i_max_nesting, i_allow_nan, i_ascii_only,
+ i_quirks_mode, i_pack, i_unpack, i_create_id, i_extend, i_key_p,
+ i_aref, i_send, i_respond_to_p, i_match, i_keys, i_depth,
+ i_buffer_initial_length, i_dup;
+
+/*
+ * Copyright 2001-2004 Unicode, Inc.
+ *
+ * Disclaimer
+ *
+ * This source code is provided as is by Unicode, Inc. No claims are
+ * made as to fitness for any particular purpose. No warranties of any
+ * kind are expressed or implied. The recipient agrees to determine
+ * applicability of information provided. If this file has been
+ * purchased on magnetic or optical media from Unicode, Inc., the
+ * sole remedy for any claim will be exchange of defective media
+ * within 90 days of receipt.
+ *
+ * Limitations on Rights to Redistribute This Code
+ *
+ * Unicode, Inc. hereby grants the right to freely use the information
+ * supplied in this file in the creation of products supporting the
+ * Unicode Standard, and to make copies of this file in any form
+ * for internal or external distribution as long as this notice
+ * remains attached.
+ */
+
+/*
+ * Index into the table below with the first byte of a UTF-8 sequence to
+ * get the number of trailing bytes that are supposed to follow it.
+ * Note that *legal* UTF-8 values can't have 4 or 5-bytes. The table is
+ * left as-is for anyone who may want to do such conversion, which was
+ * allowed in earlier algorithms.
+ */
+static const char trailingBytesForUTF8[256] = {
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 3,3,3,3,3,3,3,3,4,4,4,4,5,5,5,5
+};
+
+/*
+ * Magic values subtracted from a buffer value during UTF8 conversion.
+ * This table contains as many values as there might be trailing bytes
+ * in a UTF-8 sequence.
+ */
+static const UTF32 offsetsFromUTF8[6] = { 0x00000000UL, 0x00003080UL, 0x000E2080UL,
+ 0x03C82080UL, 0xFA082080UL, 0x82082080UL };
+
+/*
+ * Utility routine to tell whether a sequence of bytes is legal UTF-8.
+ * This must be called with the length pre-determined by the first byte.
+ * If not calling this from ConvertUTF8to*, then the length can be set by:
+ * length = trailingBytesForUTF8[*source]+1;
+ * and the sequence is illegal right away if there aren't that many bytes
+ * available.
+ * If presented with a length > 4, this returns 0. The Unicode
+ * definition of UTF-8 goes up to 4-byte sequences.
+ */
+static unsigned char isLegalUTF8(const UTF8 *source, unsigned long length)
+{
+ UTF8 a;
+ const UTF8 *srcptr = source+length;
+ switch (length) {
+ default: return 0;
+ /* Everything else falls through when "1"... */
+ case 4: if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return 0;
+ case 3: if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return 0;
+ case 2: if ((a = (*--srcptr)) > 0xBF) return 0;
+
+ switch (*source) {
+ /* no fall-through in this inner switch */
+ case 0xE0: if (a < 0xA0) return 0; break;
+ case 0xED: if (a > 0x9F) return 0; break;
+ case 0xF0: if (a < 0x90) return 0; break;
+ case 0xF4: if (a > 0x8F) return 0; break;
+ default: if (a < 0x80) return 0;
+ }
+
+ case 1: if (*source >= 0x80 && *source < 0xC2) return 0;
+ }
+ if (*source > 0xF4) return 0;
+ return 1;
+}
+
+/* Escapes the UTF16 character and stores the result in the buffer buf. */
+static void unicode_escape(char *buf, UTF16 character)
+{
+ const char *digits = "0123456789abcdef";
+
+ buf[2] = digits[character >> 12];
+ buf[3] = digits[(character >> 8) & 0xf];
+ buf[4] = digits[(character >> 4) & 0xf];
+ buf[5] = digits[character & 0xf];
+}
+
+/* Escapes the UTF16 character and stores the result in the buffer buf, then
+ * the buffer buf is appended to the FBuffer buffer. */
+static void unicode_escape_to_buffer(FBuffer *buffer, char buf[6], UTF16
+ character)
+{
+ unicode_escape(buf, character);
+ fbuffer_append(buffer, buf, 6);
+}
+
+/* Converts string to a JSON string in FBuffer buffer, where all but the ASCII
+ * and control characters are JSON escaped. */
+static void convert_UTF8_to_JSON_ASCII(FBuffer *buffer, VALUE string)
+{
+ const UTF8 *source = (UTF8 *) RSTRING_PTR(string);
+ const UTF8 *sourceEnd = source + RSTRING_LEN(string);
+ char buf[6] = { '\\', 'u' };
+
+ while (source < sourceEnd) {
+ UTF32 ch = 0;
+ unsigned short extraBytesToRead = trailingBytesForUTF8[*source];
+ if (source + extraBytesToRead >= sourceEnd) {
+ rb_raise(rb_path2class("JSON::GeneratorError"),
+ "partial character in source, but hit end");
+ }
+ if (!isLegalUTF8(source, extraBytesToRead+1)) {
+ rb_raise(rb_path2class("JSON::GeneratorError"),
+ "source sequence is illegal/malformed utf-8");
+ }
+ /*
+ * The cases all fall through. See "Note A" below.
+ */
+ switch (extraBytesToRead) {
+ case 5: ch += *source++; ch <<= 6; /* remember, illegal UTF-8 */
+ case 4: ch += *source++; ch <<= 6; /* remember, illegal UTF-8 */
+ case 3: ch += *source++; ch <<= 6;
+ case 2: ch += *source++; ch <<= 6;
+ case 1: ch += *source++; ch <<= 6;
+ case 0: ch += *source++;
+ }
+ ch -= offsetsFromUTF8[extraBytesToRead];
+
+ if (ch <= UNI_MAX_BMP) { /* Target is a character <= 0xFFFF */
+ /* UTF-16 surrogate values are illegal in UTF-32 */
+ if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) {
+#if UNI_STRICT_CONVERSION
+ source -= (extraBytesToRead+1); /* return to the illegal value itself */
+ rb_raise(rb_path2class("JSON::GeneratorError"),
+ "source sequence is illegal/malformed utf-8");
+#else
+ unicode_escape_to_buffer(buffer, buf, UNI_REPLACEMENT_CHAR);
+#endif
+ } else {
+ /* normal case */
+ if (ch >= 0x20 && ch <= 0x7f) {
+ switch (ch) {
+ case '\\':
+ fbuffer_append(buffer, "\\\\", 2);
+ break;
+ case '"':
+ fbuffer_append(buffer, "\\\"", 2);
+ break;
+ default:
+ fbuffer_append_char(buffer, (char)ch);
+ break;
+ }
+ } else {
+ switch (ch) {
+ case '\n':
+ fbuffer_append(buffer, "\\n", 2);
+ break;
+ case '\r':
+ fbuffer_append(buffer, "\\r", 2);
+ break;
+ case '\t':
+ fbuffer_append(buffer, "\\t", 2);
+ break;
+ case '\f':
+ fbuffer_append(buffer, "\\f", 2);
+ break;
+ case '\b':
+ fbuffer_append(buffer, "\\b", 2);
+ break;
+ default:
+ unicode_escape_to_buffer(buffer, buf, (UTF16) ch);
+ break;
+ }
+ }
+ }
+ } else if (ch > UNI_MAX_UTF16) {
+#if UNI_STRICT_CONVERSION
+ source -= (extraBytesToRead+1); /* return to the start */
+ rb_raise(rb_path2class("JSON::GeneratorError"),
+ "source sequence is illegal/malformed utf8");
+#else
+ unicode_escape_to_buffer(buffer, buf, UNI_REPLACEMENT_CHAR);
+#endif
+ } else {
+ /* target is a character in range 0xFFFF - 0x10FFFF. */
+ ch -= halfBase;
+ unicode_escape_to_buffer(buffer, buf, (UTF16)((ch >> halfShift) + UNI_SUR_HIGH_START));
+ unicode_escape_to_buffer(buffer, buf, (UTF16)((ch & halfMask) + UNI_SUR_LOW_START));
+ }
+ }
+}
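For code points above the BMP this function emits a UTF-16 surrogate pair using the halfBase, halfShift and halfMask constants declared in generator.h. A worked example (editor's illustration, not part of the diff): for U+1F602, ch - 0x10000 = 0xF602, so the high surrogate is 0xD800 + (0xF602 >> 10) = 0xD83D and the low surrogate is 0xDC00 + (0xF602 & 0x3FF) = 0xDE02, producing the escape \ud83d\ude02.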
+
+/* Converts string to a JSON string in FBuffer buffer, where only the
+ * characters required by the JSON standard are JSON escaped. The remaining
+ * characters (should be UTF8) are just passed through and appended to the
+ * result. */
+static void convert_UTF8_to_JSON(FBuffer *buffer, VALUE string)
+{
+ const char *ptr = RSTRING_PTR(string), *p;
+ unsigned long len = RSTRING_LEN(string), start = 0, end = 0;
+ const char *escape = NULL;
+ int escape_len;
+ unsigned char c;
+ char buf[6] = { '\\', 'u' };
+
+ for (start = 0, end = 0; end < len;) {
+ p = ptr + end;
+ c = (unsigned char) *p;
+ if (c < 0x20) {
+ switch (c) {
+ case '\n':
+ escape = "\\n";
+ escape_len = 2;
+ break;
+ case '\r':
+ escape = "\\r";
+ escape_len = 2;
+ break;
+ case '\t':
+ escape = "\\t";
+ escape_len = 2;
+ break;
+ case '\f':
+ escape = "\\f";
+ escape_len = 2;
+ break;
+ case '\b':
+ escape = "\\b";
+ escape_len = 2;
+ break;
+ default:
+ unicode_escape(buf, (UTF16) *p);
+ escape = buf;
+ escape_len = 6;
+ break;
+ }
+ } else {
+ switch (c) {
+ case '\\':
+ escape = "\\\\";
+ escape_len = 2;
+ break;
+ case '"':
+ escape = "\\\"";
+ escape_len = 2;
+ break;
+ default:
+ {
+ unsigned short clen = trailingBytesForUTF8[c] + 1;
+ if (end + clen > len) {
+ rb_raise(rb_path2class("JSON::GeneratorError"),
+ "partial character in source, but hit end");
+ }
+ if (!isLegalUTF8((UTF8 *) p, clen)) {
+ rb_raise(rb_path2class("JSON::GeneratorError"),
+ "source sequence is illegal/malformed utf-8");
+ }
+ end += clen;
+ }
+ continue;
+ break;
+ }
+ }
+ fbuffer_append(buffer, ptr + start, end - start);
+ fbuffer_append(buffer, escape, escape_len);
+ start = ++end;
+ escape = NULL;
+ }
+ fbuffer_append(buffer, ptr + start, end - start);
+}
+
+static char *fstrndup(const char *ptr, unsigned long len) {
+ char *result;
+ if (len <= 0) return NULL;
+ result = ALLOC_N(char, len);
+ memccpy(result, ptr, 0, len);
+ return result;
+}
+
+/*
+ * Document-module: JSON::Ext::Generator
+ *
+ * This is the JSON generator implemented as a C extension. It can be
+ * configured to be used by setting
+ *
+ * JSON.generator = JSON::Ext::Generator
+ *
+ * with the method generator= in JSON.
+ *
+ */
+
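As a usage sketch (editor's illustration, not part of this commit), selecting the C generator from Ruby could look like the following, assuming the extension builds and loads as json/ext:

    require 'json'
    require 'json/ext'
    JSON.generator = JSON::Ext::Generator   # switch JSON to the C extension generator
    JSON.generate("a" => [1, 2.5, nil])     # => '{"a":[1,2.5,null]}'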
+/*
+ * call-seq: to_json(state = nil)
+ *
+ * Returns a JSON string containing a JSON object, that is generated from
+ * this Hash instance.
+ * _state_ is a JSON::State object, that can also be used to configure the
+ * produced JSON string output further.
+ */
+static VALUE mHash_to_json(int argc, VALUE *argv, VALUE self)
+{
+ GENERATE_JSON(object);
+}
+
+/*
+ * call-seq: to_json(state = nil)
+ *
+ * Returns a JSON string containing a JSON array, that is generated from
+ * this Array instance.
+ * _state_ is a JSON::State object, that can also be used to configure the
+ * produced JSON string output further.
+ */
+static VALUE mArray_to_json(int argc, VALUE *argv, VALUE self) {
+ GENERATE_JSON(array);
+}
+
+/*
+ * call-seq: to_json(*)
+ *
+ * Returns a JSON string representation for this Integer number.
+ */
+static VALUE mFixnum_to_json(int argc, VALUE *argv, VALUE self)
+{
+ GENERATE_JSON(fixnum);
+}
+
+/*
+ * call-seq: to_json(*)
+ *
+ * Returns a JSON string representation for this Integer number.
+ */
+static VALUE mBignum_to_json(int argc, VALUE *argv, VALUE self)
+{
+ GENERATE_JSON(bignum);
+}
+
+/*
+ * call-seq: to_json(*)
+ *
+ * Returns a JSON string representation for this Float number.
+ */
+static VALUE mFloat_to_json(int argc, VALUE *argv, VALUE self)
+{
+ GENERATE_JSON(float);
+}
+
+/*
+ * call-seq: String.included(modul)
+ *
+ * Extends _modul_ with the String::Extend module.
+ */
+static VALUE mString_included_s(VALUE self, VALUE modul) {
+ VALUE result = rb_funcall(modul, i_extend, 1, mString_Extend);
+ return result;
+}
+
+/*
+ * call-seq: to_json(*)
+ *
+ * This string should be encoded with UTF-8. A call to this method
+ * returns a JSON string encoded with UTF-16 big endian characters as
+ * \u????.
+ */
+static VALUE mString_to_json(int argc, VALUE *argv, VALUE self)
+{
+ GENERATE_JSON(string);
+}
+
+/*
+ * call-seq: to_json_raw_object()
+ *
+ * This method creates a raw object hash, that can be nested into
+ * other data structures and will be generated as a raw string. This
+ * method should be used, if you want to convert raw strings to JSON
+ * instead of UTF-8 strings, e. g. binary data.
+ */
+static VALUE mString_to_json_raw_object(VALUE self)
+{
+ VALUE ary;
+ VALUE result = rb_hash_new();
+ rb_hash_aset(result, rb_funcall(mJSON, i_create_id, 0), rb_class_name(rb_obj_class(self)));
+ ary = rb_funcall(self, i_unpack, 1, rb_str_new2("C*"));
+ rb_hash_aset(result, rb_str_new2("raw"), ary);
+ return result;
+}
+
+/*
+ * call-seq: to_json_raw(*args)
+ *
+ * This method creates a JSON text from the result of a call to
+ * to_json_raw_object of this String.
+ */
+static VALUE mString_to_json_raw(int argc, VALUE *argv, VALUE self)
+{
+ VALUE obj = mString_to_json_raw_object(self);
+ Check_Type(obj, T_HASH);
+ return mHash_to_json(argc, argv, obj);
+}
+
+/*
+ * call-seq: json_create(o)
+ *
+ * Raw Strings are JSON Objects (the raw bytes are stored in an array for the
+ * key "raw"). The Ruby String can be created by this module method.
+ */
+static VALUE mString_Extend_json_create(VALUE self, VALUE o)
+{
+ VALUE ary;
+ Check_Type(o, T_HASH);
+ ary = rb_hash_aref(o, rb_str_new2("raw"));
+ return rb_funcall(ary, i_pack, 1, rb_str_new2("C*"));
+}
+
+/*
+ * call-seq: to_json(*)
+ *
+ * Returns a JSON string for true: 'true'.
+ */
+static VALUE mTrueClass_to_json(int argc, VALUE *argv, VALUE self)
+{
+ GENERATE_JSON(true);
+}
+
+/*
+ * call-seq: to_json(*)
+ *
+ * Returns a JSON string for false: 'false'.
+ */
+static VALUE mFalseClass_to_json(int argc, VALUE *argv, VALUE self)
+{
+ GENERATE_JSON(false);
+}
+
+/*
+ * call-seq: to_json(*)
+ *
+ * Returns a JSON string for nil: 'null'.
+ */
+static VALUE mNilClass_to_json(int argc, VALUE *argv, VALUE self)
+{
+ GENERATE_JSON(null);
+}
+
+/*
+ * call-seq: to_json(*)
+ *
+ * Converts this object to a string (calling #to_s), converts
+ * it to a JSON string, and returns the result. This is a fallback, if no
+ * special method #to_json was defined for some object.
+ */
+static VALUE mObject_to_json(int argc, VALUE *argv, VALUE self)
+{
+ VALUE state;
+ VALUE string = rb_funcall(self, i_to_s, 0);
+ rb_scan_args(argc, argv, "01", &state);
+ Check_Type(string, T_STRING);
+ state = cState_from_state_s(cState, state);
+ return cState_partial_generate(state, string);
+}
+
+static void State_free(void *ptr)
+{
+ JSON_Generator_State *state = ptr;
+ if (state->indent) ruby_xfree(state->indent);
+ if (state->space) ruby_xfree(state->space);
+ if (state->space_before) ruby_xfree(state->space_before);
+ if (state->object_nl) ruby_xfree(state->object_nl);
+ if (state->array_nl) ruby_xfree(state->array_nl);
+ if (state->array_delim) fbuffer_free(state->array_delim);
+ if (state->object_delim) fbuffer_free(state->object_delim);
+ if (state->object_delim2) fbuffer_free(state->object_delim2);
+ ruby_xfree(state);
+}
+
+static size_t State_memsize(const void *ptr)
+{
+ const JSON_Generator_State *state = ptr;
+ size_t size = sizeof(*state);
+ if (state->indent) size += state->indent_len + 1;
+ if (state->space) size += state->space_len + 1;
+ if (state->space_before) size += state->space_before_len + 1;
+ if (state->object_nl) size += state->object_nl_len + 1;
+ if (state->array_nl) size += state->array_nl_len + 1;
+ if (state->array_delim) size += FBUFFER_CAPA(state->array_delim);
+ if (state->object_delim) size += FBUFFER_CAPA(state->object_delim);
+ if (state->object_delim2) size += FBUFFER_CAPA(state->object_delim2);
+ return size;
+}
+
+#ifdef NEW_TYPEDDATA_WRAPPER
+static const rb_data_type_t JSON_Generator_State_type = {
+ "JSON/Generator/State",
+ {NULL, State_free, State_memsize,},
+#ifdef RUBY_TYPED_FREE_IMMEDIATELY
+ 0, 0,
+ RUBY_TYPED_FREE_IMMEDIATELY,
+#endif
+};
+#endif
+
+static JSON_Generator_State *State_allocate(void)
+{
+ JSON_Generator_State *state = ZALLOC(JSON_Generator_State);
+ return state;
+}
+
+static VALUE cState_s_allocate(VALUE klass)
+{
+ JSON_Generator_State *state = State_allocate();
+ return TypedData_Wrap_Struct(klass, &JSON_Generator_State_type, state);
+}
+
+/*
+ * call-seq: configure(opts)
+ *
+ * Configure this State instance with the Hash _opts_, and return
+ * itself.
+ */
+static VALUE cState_configure(VALUE self, VALUE opts)
+{
+ VALUE tmp;
+ GET_STATE(self);
+ tmp = rb_check_convert_type(opts, T_HASH, "Hash", "to_hash");
+ if (NIL_P(tmp)) tmp = rb_convert_type(opts, T_HASH, "Hash", "to_h");
+ opts = tmp;
+ tmp = rb_hash_aref(opts, ID2SYM(i_indent));
+ if (RTEST(tmp)) {
+ unsigned long len;
+ Check_Type(tmp, T_STRING);
+ len = RSTRING_LEN(tmp);
+ state->indent = fstrndup(RSTRING_PTR(tmp), len + 1);
+ state->indent_len = len;
+ }
+ tmp = rb_hash_aref(opts, ID2SYM(i_space));
+ if (RTEST(tmp)) {
+ unsigned long len;
+ Check_Type(tmp, T_STRING);
+ len = RSTRING_LEN(tmp);
+ state->space = fstrndup(RSTRING_PTR(tmp), len + 1);
+ state->space_len = len;
+ }
+ tmp = rb_hash_aref(opts, ID2SYM(i_space_before));
+ if (RTEST(tmp)) {
+ unsigned long len;
+ Check_Type(tmp, T_STRING);
+ len = RSTRING_LEN(tmp);
+ state->space_before = fstrndup(RSTRING_PTR(tmp), len + 1);
+ state->space_before_len = len;
+ }
+ tmp = rb_hash_aref(opts, ID2SYM(i_array_nl));
+ if (RTEST(tmp)) {
+ unsigned long len;
+ Check_Type(tmp, T_STRING);
+ len = RSTRING_LEN(tmp);
+ state->array_nl = fstrndup(RSTRING_PTR(tmp), len + 1);
+ state->array_nl_len = len;
+ }
+ tmp = rb_hash_aref(opts, ID2SYM(i_object_nl));
+ if (RTEST(tmp)) {
+ unsigned long len;
+ Check_Type(tmp, T_STRING);
+ len = RSTRING_LEN(tmp);
+ state->object_nl = fstrndup(RSTRING_PTR(tmp), len + 1);
+ state->object_nl_len = len;
+ }
+ tmp = ID2SYM(i_max_nesting);
+ state->max_nesting = 100;
+ if (option_given_p(opts, tmp)) {
+ VALUE max_nesting = rb_hash_aref(opts, tmp);
+ if (RTEST(max_nesting)) {
+ Check_Type(max_nesting, T_FIXNUM);
+ state->max_nesting = FIX2LONG(max_nesting);
+ } else {
+ state->max_nesting = 0;
+ }
+ }
+ tmp = ID2SYM(i_depth);
+ state->depth = 0;
+ if (option_given_p(opts, tmp)) {
+ VALUE depth = rb_hash_aref(opts, tmp);
+ if (RTEST(depth)) {
+ Check_Type(depth, T_FIXNUM);
+ state->depth = FIX2LONG(depth);
+ } else {
+ state->depth = 0;
+ }
+ }
+ tmp = ID2SYM(i_buffer_initial_length);
+ if (option_given_p(opts, tmp)) {
+ VALUE buffer_initial_length = rb_hash_aref(opts, tmp);
+ if (RTEST(buffer_initial_length)) {
+ long initial_length;
+ Check_Type(buffer_initial_length, T_FIXNUM);
+ initial_length = FIX2LONG(buffer_initial_length);
+ if (initial_length > 0) state->buffer_initial_length = initial_length;
+ }
+ }
+ tmp = rb_hash_aref(opts, ID2SYM(i_allow_nan));
+ state->allow_nan = RTEST(tmp);
+ tmp = rb_hash_aref(opts, ID2SYM(i_ascii_only));
+ state->ascii_only = RTEST(tmp);
+ tmp = rb_hash_aref(opts, ID2SYM(i_quirks_mode));
+ state->quirks_mode = RTEST(tmp);
+ return self;
+}
+
+static void set_state_ivars(VALUE hash, VALUE state)
+{
+ VALUE ivars = rb_obj_instance_variables(state);
+ int i = 0;
+ for (i = 0; i < RARRAY_LEN(ivars); i++) {
+ VALUE key = rb_funcall(rb_ary_entry(ivars, i), i_to_s, 0);
+ long key_len = RSTRING_LEN(key);
+ VALUE value = rb_iv_get(state, StringValueCStr(key));
+ rb_hash_aset(hash, rb_str_intern(rb_str_substr(key, 1, key_len - 1)), value);
+ }
+}
+
+/*
+ * call-seq: to_h
+ *
+ * Returns the configuration instance variables as a hash, that can be
+ * passed to the configure method.
+ */
+static VALUE cState_to_h(VALUE self)
+{
+ VALUE result = rb_hash_new();
+ GET_STATE(self);
+ set_state_ivars(result, self);
+ rb_hash_aset(result, ID2SYM(i_indent), rb_str_new(state->indent, state->indent_len));
+ rb_hash_aset(result, ID2SYM(i_space), rb_str_new(state->space, state->space_len));
+ rb_hash_aset(result, ID2SYM(i_space_before), rb_str_new(state->space_before, state->space_before_len));
+ rb_hash_aset(result, ID2SYM(i_object_nl), rb_str_new(state->object_nl, state->object_nl_len));
+ rb_hash_aset(result, ID2SYM(i_array_nl), rb_str_new(state->array_nl, state->array_nl_len));
+ rb_hash_aset(result, ID2SYM(i_allow_nan), state->allow_nan ? Qtrue : Qfalse);
+ rb_hash_aset(result, ID2SYM(i_ascii_only), state->ascii_only ? Qtrue : Qfalse);
+ rb_hash_aset(result, ID2SYM(i_quirks_mode), state->quirks_mode ? Qtrue : Qfalse);
+ rb_hash_aset(result, ID2SYM(i_max_nesting), LONG2FIX(state->max_nesting));
+ rb_hash_aset(result, ID2SYM(i_depth), LONG2FIX(state->depth));
+ rb_hash_aset(result, ID2SYM(i_buffer_initial_length), LONG2FIX(state->buffer_initial_length));
+ return result;
+}
+
+/*
+* call-seq: [](name)
+*
+* Returns the value returned by method +name+.
+*/
+static VALUE cState_aref(VALUE self, VALUE name)
+{
+ name = rb_funcall(name, i_to_s, 0);
+ if (RTEST(rb_funcall(self, i_respond_to_p, 1, name))) {
+ return rb_funcall(self, i_send, 1, name);
+ } else {
+ return rb_ivar_get(self, rb_intern_str(rb_str_concat(rb_str_new2("@"), name)));
+ }
+}
+
+/*
+* call-seq: []=(name, value)
+*
+* Sets the attribute name to value.
+*/
+static VALUE cState_aset(VALUE self, VALUE name, VALUE value)
+{
+ VALUE name_writer;
+
+ name = rb_funcall(name, i_to_s, 0);
+ name_writer = rb_str_cat2(rb_str_dup(name), "=");
+ if (RTEST(rb_funcall(self, i_respond_to_p, 1, name_writer))) {
+ return rb_funcall(self, i_send, 2, name_writer, value);
+ } else {
+ rb_ivar_set(self, rb_intern_str(rb_str_concat(rb_str_new2("@"), name)), value);
+ }
+ return Qnil;
+}
+
+static void generate_json_object(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
+{
+ char *object_nl = state->object_nl;
+ long object_nl_len = state->object_nl_len;
+ char *indent = state->indent;
+ long indent_len = state->indent_len;
+ long max_nesting = state->max_nesting;
+ char *delim = FBUFFER_PTR(state->object_delim);
+ long delim_len = FBUFFER_LEN(state->object_delim);
+ char *delim2 = FBUFFER_PTR(state->object_delim2);
+ long delim2_len = FBUFFER_LEN(state->object_delim2);
+ long depth = ++state->depth;
+ int i, j;
+ VALUE key, key_to_s, keys;
+ if (max_nesting != 0 && depth > max_nesting) {
+ fbuffer_free(buffer);
+ rb_raise(eNestingError, "nesting of %ld is too deep", --state->depth);
+ }
+ fbuffer_append_char(buffer, '{');
+ keys = rb_funcall(obj, i_keys, 0);
+ for(i = 0; i < RARRAY_LEN(keys); i++) {
+ if (i > 0) fbuffer_append(buffer, delim, delim_len);
+ if (object_nl) {
+ fbuffer_append(buffer, object_nl, object_nl_len);
+ }
+ if (indent) {
+ for (j = 0; j < depth; j++) {
+ fbuffer_append(buffer, indent, indent_len);
+ }
+ }
+ key = rb_ary_entry(keys, i);
+ key_to_s = rb_funcall(key, i_to_s, 0);
+ Check_Type(key_to_s, T_STRING);
+ generate_json(buffer, Vstate, state, key_to_s);
+ fbuffer_append(buffer, delim2, delim2_len);
+ generate_json(buffer, Vstate, state, rb_hash_aref(obj, key));
+ }
+ depth = --state->depth;
+ if (object_nl) {
+ fbuffer_append(buffer, object_nl, object_nl_len);
+ if (indent) {
+ for (j = 0; j < depth; j++) {
+ fbuffer_append(buffer, indent, indent_len);
+ }
+ }
+ }
+ fbuffer_append_char(buffer, '}');
+}
+
+static void generate_json_array(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
+{
+ char *array_nl = state->array_nl;
+ long array_nl_len = state->array_nl_len;
+ char *indent = state->indent;
+ long indent_len = state->indent_len;
+ long max_nesting = state->max_nesting;
+ char *delim = FBUFFER_PTR(state->array_delim);
+ long delim_len = FBUFFER_LEN(state->array_delim);
+ long depth = ++state->depth;
+ int i, j;
+ if (max_nesting != 0 && depth > max_nesting) {
+ fbuffer_free(buffer);
+ rb_raise(eNestingError, "nesting of %ld is too deep", --state->depth);
+ }
+ fbuffer_append_char(buffer, '[');
+ if (array_nl) fbuffer_append(buffer, array_nl, array_nl_len);
+ for(i = 0; i < RARRAY_LEN(obj); i++) {
+ if (i > 0) fbuffer_append(buffer, delim, delim_len);
+ if (indent) {
+ for (j = 0; j < depth; j++) {
+ fbuffer_append(buffer, indent, indent_len);
+ }
+ }
+ generate_json(buffer, Vstate, state, rb_ary_entry(obj, i));
+ }
+ state->depth = --depth;
+ if (array_nl) {
+ fbuffer_append(buffer, array_nl, array_nl_len);
+ if (indent) {
+ for (j = 0; j < depth; j++) {
+ fbuffer_append(buffer, indent, indent_len);
+ }
+ }
+ }
+ fbuffer_append_char(buffer, ']');
+}
+
+static void generate_json_string(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
+{
+ fbuffer_append_char(buffer, '"');
+#ifdef HAVE_RUBY_ENCODING_H
+ obj = rb_funcall(obj, i_encode, 1, CEncoding_UTF_8);
+#endif
+ if (state->ascii_only) {
+ convert_UTF8_to_JSON_ASCII(buffer, obj);
+ } else {
+ convert_UTF8_to_JSON(buffer, obj);
+ }
+ fbuffer_append_char(buffer, '"');
+}
+
+static void generate_json_null(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
+{
+ fbuffer_append(buffer, "null", 4);
+}
+
+static void generate_json_false(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
+{
+ fbuffer_append(buffer, "false", 5);
+}
+
+static void generate_json_true(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
+{
+ fbuffer_append(buffer, "true", 4);
+}
+
+static void generate_json_fixnum(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
+{
+ fbuffer_append_long(buffer, FIX2LONG(obj));
+}
+
+static void generate_json_bignum(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
+{
+ VALUE tmp = rb_funcall(obj, i_to_s, 0);
+ fbuffer_append_str(buffer, tmp);
+}
+
+static void generate_json_float(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
+{
+ double value = RFLOAT_VALUE(obj);
+ char allow_nan = state->allow_nan;
+ VALUE tmp = rb_funcall(obj, i_to_s, 0);
+ if (!allow_nan) {
+ if (isinf(value)) {
+ fbuffer_free(buffer);
+ rb_raise(eGeneratorError, "%u: %"PRIsVALUE" not allowed in JSON", __LINE__, RB_OBJ_STRING(tmp));
+ } else if (isnan(value)) {
+ fbuffer_free(buffer);
+ rb_raise(eGeneratorError, "%u: %"PRIsVALUE" not allowed in JSON", __LINE__, RB_OBJ_STRING(tmp));
+ }
+ }
+ fbuffer_append_str(buffer, tmp);
+}
+
+static void generate_json(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
+{
+ VALUE tmp;
+ VALUE klass = CLASS_OF(obj);
+ if (klass == rb_cHash) {
+ generate_json_object(buffer, Vstate, state, obj);
+ } else if (klass == rb_cArray) {
+ generate_json_array(buffer, Vstate, state, obj);
+ } else if (klass == rb_cString) {
+ generate_json_string(buffer, Vstate, state, obj);
+ } else if (obj == Qnil) {
+ generate_json_null(buffer, Vstate, state, obj);
+ } else if (obj == Qfalse) {
+ generate_json_false(buffer, Vstate, state, obj);
+ } else if (obj == Qtrue) {
+ generate_json_true(buffer, Vstate, state, obj);
+ } else if (klass == rb_cFixnum) {
+ generate_json_fixnum(buffer, Vstate, state, obj);
+ } else if (klass == rb_cBignum) {
+ generate_json_bignum(buffer, Vstate, state, obj);
+ } else if (klass == rb_cFloat) {
+ generate_json_float(buffer, Vstate, state, obj);
+ } else if (rb_respond_to(obj, i_to_json)) {
+ tmp = rb_funcall(obj, i_to_json, 1, Vstate);
+ Check_Type(tmp, T_STRING);
+ fbuffer_append_str(buffer, tmp);
+ } else {
+ tmp = rb_funcall(obj, i_to_s, 0);
+ Check_Type(tmp, T_STRING);
+ generate_json_string(buffer, Vstate, state, tmp);
+ }
+}
+
+static FBuffer *cState_prepare_buffer(VALUE self)
+{
+ FBuffer *buffer;
+ GET_STATE(self);
+ buffer = fbuffer_alloc(state->buffer_initial_length);
+
+ if (state->object_delim) {
+ fbuffer_clear(state->object_delim);
+ } else {
+ state->object_delim = fbuffer_alloc(16);
+ }
+ fbuffer_append_char(state->object_delim, ',');
+ if (state->object_delim2) {
+ fbuffer_clear(state->object_delim2);
+ } else {
+ state->object_delim2 = fbuffer_alloc(16);
+ }
+ if (state->space_before) fbuffer_append(state->object_delim2, state->space_before, state->space_before_len);
+ fbuffer_append_char(state->object_delim2, ':');
+ if (state->space) fbuffer_append(state->object_delim2, state->space, state->space_len);
+
+ if (state->array_delim) {
+ fbuffer_clear(state->array_delim);
+ } else {
+ state->array_delim = fbuffer_alloc(16);
+ }
+ fbuffer_append_char(state->array_delim, ',');
+ if (state->array_nl) fbuffer_append(state->array_delim, state->array_nl, state->array_nl_len);
+ return buffer;
+}
+
+static VALUE cState_partial_generate(VALUE self, VALUE obj)
+{
+ FBuffer *buffer = cState_prepare_buffer(self);
+ GET_STATE(self);
+ generate_json(buffer, self, state, obj);
+ return fbuffer_to_s(buffer);
+}
+
+/*
+ * This function returns true if string is either a JSON array or JSON object.
+ * It might suffer from false positives, e. g. syntactically incorrect JSON in
+ * the string or certain UTF-8 characters on the right hand side.
+ */
+static int isArrayOrObject(VALUE string)
+{
+ long string_len = RSTRING_LEN(string);
+ char *p = RSTRING_PTR(string), *q = p + string_len - 1;
+ if (string_len < 2) return 0;
+ for (; p < q && isspace((unsigned char)*p); p++);
+ for (; q > p && isspace((unsigned char)*q); q--);
+ return (*p == '[' && *q == ']') || (*p == '{' && *q == '}');
+}
+
+/*
+ * call-seq: generate(obj)
+ *
+ * Generates a valid JSON document from object +obj+ and returns the
+ * result. If no valid JSON document can be created this method raises a
+ * GeneratorError exception.
+ */
+static VALUE cState_generate(VALUE self, VALUE obj)
+{
+ VALUE result = cState_partial_generate(self, obj);
+ GET_STATE(self);
+ if (!state->quirks_mode && !isArrayOrObject(result)) {
+ rb_raise(eGeneratorError, "only generation of JSON objects or arrays allowed");
+ }
+ return result;
+}
+
+/*
+ * call-seq: new(opts = {})
+ *
+ * Instantiates a new State object, configured by _opts_.
+ *
+ * _opts_ can have the following keys:
+ *
+ * * *indent*: a string used to indent levels (default: ''),
+ * * *space*: a string that is put after, a : or , delimiter (default: ''),
+ * * *space_before*: a string that is put before a : pair delimiter (default: ''),
+ * * *object_nl*: a string that is put at the end of a JSON object (default: ''),
+ * * *array_nl*: a string that is put at the end of a JSON array (default: ''),
+ * * *allow_nan*: true if NaN, Infinity, and -Infinity should be
+ * generated; otherwise an exception is raised if these values are
+ * encountered. This option defaults to false.
+ * * *quirks_mode*: Enables quirks mode, which for example makes it
+ * possible to generate single JSON values instead of whole documents.
+ * * *buffer_initial_length*: sets the initial length of the generator's
+ * internal buffer.
+ */
+static VALUE cState_initialize(int argc, VALUE *argv, VALUE self)
+{
+ VALUE opts;
+ GET_STATE(self);
+ state->max_nesting = 100;
+ state->buffer_initial_length = FBUFFER_INITIAL_LENGTH_DEFAULT;
+ rb_scan_args(argc, argv, "01", &opts);
+ if (!NIL_P(opts)) cState_configure(self, opts);
+ return self;
+}
+
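A hedged Ruby sketch of how the options documented above affect output (editor's illustration, not part of the diff; values chosen arbitrarily, with the expected result following from the generate_json_object logic above):

    state = JSON::Ext::Generator::State.new(
      :indent    => '  ',
      :space     => ' ',
      :object_nl => "\n"
    )
    state.generate("id" => 1)   # => "{\n  \"id\": 1\n}"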
+/*
+ * call-seq: initialize_copy(orig)
+ *
+ * Initializes this object from orig if it can be duplicated/cloned and returns
+ * it.
+*/
+static VALUE cState_init_copy(VALUE obj, VALUE orig)
+{
+ JSON_Generator_State *objState, *origState;
+
+ if (obj == orig) return obj;
+ GET_STATE_TO(obj, objState);
+ GET_STATE_TO(orig, origState);
+ if (!objState) rb_raise(rb_eArgError, "unallocated JSON::State");
+
+ MEMCPY(objState, origState, JSON_Generator_State, 1);
+ objState->indent = fstrndup(origState->indent, origState->indent_len);
+ objState->space = fstrndup(origState->space, origState->space_len);
+ objState->space_before = fstrndup(origState->space_before, origState->space_before_len);
+ objState->object_nl = fstrndup(origState->object_nl, origState->object_nl_len);
+ objState->array_nl = fstrndup(origState->array_nl, origState->array_nl_len);
+ if (origState->array_delim) objState->array_delim = fbuffer_dup(origState->array_delim);
+ if (origState->object_delim) objState->object_delim = fbuffer_dup(origState->object_delim);
+ if (origState->object_delim2) objState->object_delim2 = fbuffer_dup(origState->object_delim2);
+ return obj;
+}
+
+/*
+ * call-seq: from_state(opts)
+ *
+ * Creates a State object from _opts_: if _opts_ is a Hash, a new State
+ * instance configured by _opts_ is created; anything else creates an
+ * unconfigured instance. If _opts_ is a State object, it is just returned.
+ */
+static VALUE cState_from_state_s(VALUE self, VALUE opts)
+{
+ if (rb_obj_is_kind_of(opts, self)) {
+ return opts;
+ } else if (rb_obj_is_kind_of(opts, rb_cHash)) {
+ return rb_funcall(self, i_new, 1, opts);
+ } else {
+ if (NIL_P(CJSON_SAFE_STATE_PROTOTYPE)) {
+ CJSON_SAFE_STATE_PROTOTYPE = rb_const_get(mJSON, i_SAFE_STATE_PROTOTYPE);
+ }
+ return rb_funcall(CJSON_SAFE_STATE_PROTOTYPE, i_dup, 0);
+ }
+}
+
+/*
+ * call-seq: indent()
+ *
+ * Returns the string that is used to indent levels in the JSON text.
+ */
+static VALUE cState_indent(VALUE self)
+{
+ GET_STATE(self);
+ return state->indent ? rb_str_new(state->indent, state->indent_len) : rb_str_new2("");
+}
+
+/*
+ * call-seq: indent=(indent)
+ *
+ * Sets the string that is used to indent levels in the JSON text.
+ */
+static VALUE cState_indent_set(VALUE self, VALUE indent)
+{
+ unsigned long len;
+ GET_STATE(self);
+ Check_Type(indent, T_STRING);
+ len = RSTRING_LEN(indent);
+ if (len == 0) {
+ if (state->indent) {
+ ruby_xfree(state->indent);
+ state->indent = NULL;
+ state->indent_len = 0;
+ }
+ } else {
+ if (state->indent) ruby_xfree(state->indent);
+ state->indent = strdup(RSTRING_PTR(indent));
+ state->indent_len = len;
+ }
+ return Qnil;
+}
+
+/*
+ * call-seq: space()
+ *
+ * Returns the string that is used to insert a space between the tokens in a JSON
+ * string.
+ */
+static VALUE cState_space(VALUE self)
+{
+ GET_STATE(self);
+ return state->space ? rb_str_new(state->space, state->space_len) : rb_str_new2("");
+}
+
+/*
+ * call-seq: space=(space)
+ *
+ * Sets _space_ to the string that is used to insert a space between the tokens in a JSON
+ * string.
+ */
+static VALUE cState_space_set(VALUE self, VALUE space)
+{
+ unsigned long len;
+ GET_STATE(self);
+ Check_Type(space, T_STRING);
+ len = RSTRING_LEN(space);
+ if (len == 0) {
+ if (state->space) {
+ ruby_xfree(state->space);
+ state->space = NULL;
+ state->space_len = 0;
+ }
+ } else {
+ if (state->space) ruby_xfree(state->space);
+ state->space = strdup(RSTRING_PTR(space));
+ state->space_len = len;
+ }
+ return Qnil;
+}
+
+/*
+ * call-seq: space_before()
+ *
+ * Returns the string that is used to insert a space before the ':' in JSON objects.
+ */
+static VALUE cState_space_before(VALUE self)
+{
+ GET_STATE(self);
+ return state->space_before ? rb_str_new(state->space_before, state->space_before_len) : rb_str_new2("");
+}
+
+/*
+ * call-seq: space_before=(space_before)
+ *
+ * Sets the string that is used to insert a space before the ':' in JSON objects.
+ */
+static VALUE cState_space_before_set(VALUE self, VALUE space_before)
+{
+ unsigned long len;
+ GET_STATE(self);
+ Check_Type(space_before, T_STRING);
+ len = RSTRING_LEN(space_before);
+ if (len == 0) {
+ if (state->space_before) {
+ ruby_xfree(state->space_before);
+ state->space_before = NULL;
+ state->space_before_len = 0;
+ }
+ } else {
+ if (state->space_before) ruby_xfree(state->space_before);
+ state->space_before = strdup(RSTRING_PTR(space_before));
+ state->space_before_len = len;
+ }
+ return Qnil;
+}
+
+/*
+ * call-seq: object_nl()
+ *
+ * This string is put at the end of a line that holds a JSON object (or
+ * Hash).
+ */
+static VALUE cState_object_nl(VALUE self)
+{
+ GET_STATE(self);
+ return state->object_nl ? rb_str_new(state->object_nl, state->object_nl_len) : rb_str_new2("");
+}
+
+/*
+ * call-seq: object_nl=(object_nl)
+ *
+ * This string is put at the end of a line that holds a JSON object (or
+ * Hash).
+ */
+static VALUE cState_object_nl_set(VALUE self, VALUE object_nl)
+{
+ unsigned long len;
+ GET_STATE(self);
+ Check_Type(object_nl, T_STRING);
+ len = RSTRING_LEN(object_nl);
+ if (len == 0) {
+ if (state->object_nl) {
+ ruby_xfree(state->object_nl);
+ state->object_nl = NULL;
+ }
+ } else {
+ if (state->object_nl) ruby_xfree(state->object_nl);
+ state->object_nl = strdup(RSTRING_PTR(object_nl));
+ state->object_nl_len = len;
+ }
+ return Qnil;
+}
+
+/*
+ * call-seq: array_nl()
+ *
+ * This string is put at the end of a line that holds a JSON array.
+ */
+static VALUE cState_array_nl(VALUE self)
+{
+ GET_STATE(self);
+ return state->array_nl ? rb_str_new(state->array_nl, state->array_nl_len) : rb_str_new2("");
+}
+
+/*
+ * call-seq: array_nl=(array_nl)
+ *
+ * This string is put at the end of a line that holds a JSON array.
+ */
+static VALUE cState_array_nl_set(VALUE self, VALUE array_nl)
+{
+ unsigned long len;
+ GET_STATE(self);
+ Check_Type(array_nl, T_STRING);
+ len = RSTRING_LEN(array_nl);
+ if (len == 0) {
+ if (state->array_nl) {
+ ruby_xfree(state->array_nl);
+ state->array_nl = NULL;
+ }
+ } else {
+ if (state->array_nl) ruby_xfree(state->array_nl);
+ state->array_nl = strdup(RSTRING_PTR(array_nl));
+ state->array_nl_len = len;
+ }
+ return Qnil;
+}
+
+
+/*
+* call-seq: check_circular?
+*
+* Returns true, if circular data structures should be checked,
+* otherwise returns false.
+*/
+static VALUE cState_check_circular_p(VALUE self)
+{
+ GET_STATE(self);
+ return state->max_nesting ? Qtrue : Qfalse;
+}
+
+/*
+ * call-seq: max_nesting
+ *
+ * This integer returns the maximum level of data structure nesting in
+ * the generated JSON, max_nesting = 0 if no maximum is checked.
+ */
+static VALUE cState_max_nesting(VALUE self)
+{
+ GET_STATE(self);
+ return LONG2FIX(state->max_nesting);
+}
+
+/*
+ * call-seq: max_nesting=(depth)
+ *
+ * This sets the maximum level of data structure nesting in the generated JSON
+ * to the integer depth, max_nesting = 0 if no maximum should be checked.
+ */
+static VALUE cState_max_nesting_set(VALUE self, VALUE depth)
+{
+ GET_STATE(self);
+ Check_Type(depth, T_FIXNUM);
+ return state->max_nesting = FIX2LONG(depth);
+}
+
+/*
+ * call-seq: allow_nan?
+ *
+ * Returns true, if NaN, Infinity, and -Infinity should be generated, otherwise
+ * returns false.
+ */
+static VALUE cState_allow_nan_p(VALUE self)
+{
+ GET_STATE(self);
+ return state->allow_nan ? Qtrue : Qfalse;
+}
+
+/*
+ * call-seq: ascii_only?
+ *
+ * Returns true, if only ASCII characters should appear in the generated
+ * JSON (all other characters are \u-escaped), otherwise returns false.
+ */
+static VALUE cState_ascii_only_p(VALUE self)
+{
+ GET_STATE(self);
+ return state->ascii_only ? Qtrue : Qfalse;
+}
+
+/*
+ * call-seq: quirks_mode?
+ *
+ * Returns true, if quirks mode is enabled. Otherwise returns false.
+ */
+static VALUE cState_quirks_mode_p(VALUE self)
+{
+ GET_STATE(self);
+ return state->quirks_mode ? Qtrue : Qfalse;
+}
+
+/*
+ * call-seq: quirks_mode=(enable)
+ *
+ * If set to true, enables the quirks_mode mode.
+ */
+static VALUE cState_quirks_mode_set(VALUE self, VALUE enable)
+{
+ GET_STATE(self);
+ state->quirks_mode = RTEST(enable);
+ return Qnil;
+}
+
+/*
+ * call-seq: depth
+ *
+ * This integer returns the current depth of data structure nesting.
+ */
+static VALUE cState_depth(VALUE self)
+{
+ GET_STATE(self);
+ return LONG2FIX(state->depth);
+}
+
+/*
+ * call-seq: depth=(depth)
+ *
+ * This sets the current depth of data structure nesting to the integer
+ * +depth+.
+ */
+static VALUE cState_depth_set(VALUE self, VALUE depth)
+{
+ GET_STATE(self);
+ Check_Type(depth, T_FIXNUM);
+ state->depth = FIX2LONG(depth);
+ return Qnil;
+}
+
+/*
+ * call-seq: buffer_initial_length
+ *
+ * This integer returns the current initial length of the buffer.
+ */
+static VALUE cState_buffer_initial_length(VALUE self)
+{
+ GET_STATE(self);
+ return LONG2FIX(state->buffer_initial_length);
+}
+
+/*
+ * call-seq: buffer_initial_length=(length)
+ *
+ * This sets the initial length of the buffer to +length+, if +length+ > 0,
+ * otherwise its value isn't changed.
+ */
+static VALUE cState_buffer_initial_length_set(VALUE self, VALUE buffer_initial_length)
+{
+ long initial_length;
+ GET_STATE(self);
+ Check_Type(buffer_initial_length, T_FIXNUM);
+ initial_length = FIX2LONG(buffer_initial_length);
+ if (initial_length > 0) {
+ state->buffer_initial_length = initial_length;
+ }
+ return Qnil;
+}
+
+/*
+ *
+ */
+void Init_generator(void)
+{
+ rb_require("json/common");
+
+ mJSON = rb_define_module("JSON");
+ mExt = rb_define_module_under(mJSON, "Ext");
+ mGenerator = rb_define_module_under(mExt, "Generator");
+
+ eGeneratorError = rb_path2class("JSON::GeneratorError");
+ eNestingError = rb_path2class("JSON::NestingError");
+
+ cState = rb_define_class_under(mGenerator, "State", rb_cObject);
+ rb_define_alloc_func(cState, cState_s_allocate);
+ rb_define_singleton_method(cState, "from_state", cState_from_state_s, 1);
+ rb_define_method(cState, "initialize", cState_initialize, -1);
+ rb_define_method(cState, "initialize_copy", cState_init_copy, 1);
+ rb_define_method(cState, "indent", cState_indent, 0);
+ rb_define_method(cState, "indent=", cState_indent_set, 1);
+ rb_define_method(cState, "space", cState_space, 0);
+ rb_define_method(cState, "space=", cState_space_set, 1);
+ rb_define_method(cState, "space_before", cState_space_before, 0);
+ rb_define_method(cState, "space_before=", cState_space_before_set, 1);
+ rb_define_method(cState, "object_nl", cState_object_nl, 0);
+ rb_define_method(cState, "object_nl=", cState_object_nl_set, 1);
+ rb_define_method(cState, "array_nl", cState_array_nl, 0);
+ rb_define_method(cState, "array_nl=", cState_array_nl_set, 1);
+ rb_define_method(cState, "max_nesting", cState_max_nesting, 0);
+ rb_define_method(cState, "max_nesting=", cState_max_nesting_set, 1);
+ rb_define_method(cState, "check_circular?", cState_check_circular_p, 0);
+ rb_define_method(cState, "allow_nan?", cState_allow_nan_p, 0);
+ rb_define_method(cState, "ascii_only?", cState_ascii_only_p, 0);
+ rb_define_method(cState, "quirks_mode?", cState_quirks_mode_p, 0);
+ rb_define_method(cState, "quirks_mode", cState_quirks_mode_p, 0);
+ rb_define_method(cState, "quirks_mode=", cState_quirks_mode_set, 1);
+ rb_define_method(cState, "depth", cState_depth, 0);
+ rb_define_method(cState, "depth=", cState_depth_set, 1);
+ rb_define_method(cState, "buffer_initial_length", cState_buffer_initial_length, 0);
+ rb_define_method(cState, "buffer_initial_length=", cState_buffer_initial_length_set, 1);
+ rb_define_method(cState, "configure", cState_configure, 1);
+ rb_define_alias(cState, "merge", "configure");
+ rb_define_method(cState, "to_h", cState_to_h, 0);
+ rb_define_alias(cState, "to_hash", "to_h");
+ rb_define_method(cState, "[]", cState_aref, 1);
+ rb_define_method(cState, "[]=", cState_aset, 2);
+ rb_define_method(cState, "generate", cState_generate, 1);
+
+ mGeneratorMethods = rb_define_module_under(mGenerator, "GeneratorMethods");
+ mObject = rb_define_module_under(mGeneratorMethods, "Object");
+ rb_define_method(mObject, "to_json", mObject_to_json, -1);
+ mHash = rb_define_module_under(mGeneratorMethods, "Hash");
+ rb_define_method(mHash, "to_json", mHash_to_json, -1);
+ mArray = rb_define_module_under(mGeneratorMethods, "Array");
+ rb_define_method(mArray, "to_json", mArray_to_json, -1);
+ mFixnum = rb_define_module_under(mGeneratorMethods, "Fixnum");
+ rb_define_method(mFixnum, "to_json", mFixnum_to_json, -1);
+ mBignum = rb_define_module_under(mGeneratorMethods, "Bignum");
+ rb_define_method(mBignum, "to_json", mBignum_to_json, -1);
+ mFloat = rb_define_module_under(mGeneratorMethods, "Float");
+ rb_define_method(mFloat, "to_json", mFloat_to_json, -1);
+ mString = rb_define_module_under(mGeneratorMethods, "String");
+ rb_define_singleton_method(mString, "included", mString_included_s, 1);
+ rb_define_method(mString, "to_json", mString_to_json, -1);
+ rb_define_method(mString, "to_json_raw", mString_to_json_raw, -1);
+ rb_define_method(mString, "to_json_raw_object", mString_to_json_raw_object, 0);
+ mString_Extend = rb_define_module_under(mString, "Extend");
+ rb_define_method(mString_Extend, "json_create", mString_Extend_json_create, 1);
+ mTrueClass = rb_define_module_under(mGeneratorMethods, "TrueClass");
+ rb_define_method(mTrueClass, "to_json", mTrueClass_to_json, -1);
+ mFalseClass = rb_define_module_under(mGeneratorMethods, "FalseClass");
+ rb_define_method(mFalseClass, "to_json", mFalseClass_to_json, -1);
+ mNilClass = rb_define_module_under(mGeneratorMethods, "NilClass");
+ rb_define_method(mNilClass, "to_json", mNilClass_to_json, -1);
+
+ CRegexp_MULTILINE = rb_const_get(rb_cRegexp, rb_intern("MULTILINE"));
+ i_to_s = rb_intern("to_s");
+ i_to_json = rb_intern("to_json");
+ i_new = rb_intern("new");
+ i_indent = rb_intern("indent");
+ i_space = rb_intern("space");
+ i_space_before = rb_intern("space_before");
+ i_object_nl = rb_intern("object_nl");
+ i_array_nl = rb_intern("array_nl");
+ i_max_nesting = rb_intern("max_nesting");
+ i_allow_nan = rb_intern("allow_nan");
+ i_ascii_only = rb_intern("ascii_only");
+ i_quirks_mode = rb_intern("quirks_mode");
+ i_depth = rb_intern("depth");
+ i_buffer_initial_length = rb_intern("buffer_initial_length");
+ i_pack = rb_intern("pack");
+ i_unpack = rb_intern("unpack");
+ i_create_id = rb_intern("create_id");
+ i_extend = rb_intern("extend");
+ i_key_p = rb_intern("key?");
+ i_aref = rb_intern("[]");
+ i_send = rb_intern("__send__");
+ i_respond_to_p = rb_intern("respond_to?");
+ i_match = rb_intern("match");
+ i_keys = rb_intern("keys");
+ i_dup = rb_intern("dup");
+#ifdef HAVE_RUBY_ENCODING_H
+ CEncoding_UTF_8 = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-8"));
+ i_encoding = rb_intern("encoding");
+ i_encode = rb_intern("encode");
+#endif
+ i_SAFE_STATE_PROTOTYPE = rb_intern("SAFE_STATE_PROTOTYPE");
+ CJSON_SAFE_STATE_PROTOTYPE = Qnil;
+}
diff --git a/ext/json/generator/generator.h b/ext/json/generator/generator.h
new file mode 100644
index 0000000000..416159a9c5
--- /dev/null
+++ b/ext/json/generator/generator.h
@@ -0,0 +1,167 @@
+#ifndef _GENERATOR_H_
+#define _GENERATOR_H_
+
+#include <string.h>
+#include <math.h>
+#include <ctype.h>
+
+#include "ruby.h"
+
+#ifdef HAVE_RUBY_RE_H
+#include "ruby/re.h"
+#else
+#include "re.h"
+#endif
+
+#ifndef rb_intern_str
+#define rb_intern_str(string) SYM2ID(rb_str_intern(string))
+#endif
+
+#ifndef rb_obj_instance_variables
+#define rb_obj_instance_variables(object) rb_funcall(object, rb_intern("instance_variables"), 0)
+#endif
+
+#define option_given_p(opts, key) RTEST(rb_funcall(opts, i_key_p, 1, key))
+
+/* unicode definitions */
+
+#define UNI_STRICT_CONVERSION 1
+
+typedef unsigned long UTF32; /* at least 32 bits */
+typedef unsigned short UTF16; /* at least 16 bits */
+typedef unsigned char UTF8; /* typically 8 bits */
+
+#define UNI_REPLACEMENT_CHAR (UTF32)0x0000FFFD
+#define UNI_MAX_BMP (UTF32)0x0000FFFF
+#define UNI_MAX_UTF16 (UTF32)0x0010FFFF
+#define UNI_MAX_UTF32 (UTF32)0x7FFFFFFF
+#define UNI_MAX_LEGAL_UTF32 (UTF32)0x0010FFFF
+
+#define UNI_SUR_HIGH_START (UTF32)0xD800
+#define UNI_SUR_HIGH_END (UTF32)0xDBFF
+#define UNI_SUR_LOW_START (UTF32)0xDC00
+#define UNI_SUR_LOW_END (UTF32)0xDFFF
+
+static const int halfShift = 10; /* used for shifting by 10 bits */
+
+static const UTF32 halfBase = 0x0010000UL;
+static const UTF32 halfMask = 0x3FFUL;
+
+static unsigned char isLegalUTF8(const UTF8 *source, unsigned long length);
+static void unicode_escape(char *buf, UTF16 character);
+static void unicode_escape_to_buffer(FBuffer *buffer, char buf[6], UTF16 character);
+static void convert_UTF8_to_JSON_ASCII(FBuffer *buffer, VALUE string);
+static void convert_UTF8_to_JSON(FBuffer *buffer, VALUE string);
+static char *fstrndup(const char *ptr, unsigned long len);
+
+/* ruby api and some helpers */
+
+typedef struct JSON_Generator_StateStruct {
+ char *indent;
+ long indent_len;
+ char *space;
+ long space_len;
+ char *space_before;
+ long space_before_len;
+ char *object_nl;
+ long object_nl_len;
+ char *array_nl;
+ long array_nl_len;
+ FBuffer *array_delim;
+ FBuffer *object_delim;
+ FBuffer *object_delim2;
+ long max_nesting;
+ char allow_nan;
+ char ascii_only;
+ char quirks_mode;
+ long depth;
+ long buffer_initial_length;
+} JSON_Generator_State;
+
+#define GET_STATE_TO(self, state) \
+ TypedData_Get_Struct(self, JSON_Generator_State, &JSON_Generator_State_type, state)
+
+#define GET_STATE(self) \
+ JSON_Generator_State *state; \
+ GET_STATE_TO(self, state)
+
+#define GENERATE_JSON(type) \
+ FBuffer *buffer; \
+ VALUE Vstate; \
+ JSON_Generator_State *state; \
+ \
+ rb_scan_args(argc, argv, "01", &Vstate); \
+ Vstate = cState_from_state_s(cState, Vstate); \
+ TypedData_Get_Struct(Vstate, JSON_Generator_State, &JSON_Generator_State_type, state); \
+ buffer = cState_prepare_buffer(Vstate); \
+ generate_json_##type(buffer, Vstate, state, self); \
+ return fbuffer_to_s(buffer)
+
+static VALUE mHash_to_json(int argc, VALUE *argv, VALUE self);
+static VALUE mArray_to_json(int argc, VALUE *argv, VALUE self);
+static VALUE mFixnum_to_json(int argc, VALUE *argv, VALUE self);
+static VALUE mBignum_to_json(int argc, VALUE *argv, VALUE self);
+static VALUE mFloat_to_json(int argc, VALUE *argv, VALUE self);
+static VALUE mString_included_s(VALUE self, VALUE modul);
+static VALUE mString_to_json(int argc, VALUE *argv, VALUE self);
+static VALUE mString_to_json_raw_object(VALUE self);
+static VALUE mString_to_json_raw(int argc, VALUE *argv, VALUE self);
+static VALUE mString_Extend_json_create(VALUE self, VALUE o);
+static VALUE mTrueClass_to_json(int argc, VALUE *argv, VALUE self);
+static VALUE mFalseClass_to_json(int argc, VALUE *argv, VALUE self);
+static VALUE mNilClass_to_json(int argc, VALUE *argv, VALUE self);
+static VALUE mObject_to_json(int argc, VALUE *argv, VALUE self);
+static void State_free(void *state);
+static JSON_Generator_State *State_allocate(void);
+static VALUE cState_s_allocate(VALUE klass);
+static VALUE cState_configure(VALUE self, VALUE opts);
+static VALUE cState_to_h(VALUE self);
+static void generate_json(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj);
+static void generate_json_object(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj);
+static void generate_json_array(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj);
+static void generate_json_string(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj);
+static void generate_json_null(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj);
+static void generate_json_false(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj);
+static void generate_json_true(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj);
+static void generate_json_fixnum(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj);
+static void generate_json_bignum(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj);
+static void generate_json_float(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj);
+static VALUE cState_partial_generate(VALUE self, VALUE obj);
+static VALUE cState_generate(VALUE self, VALUE obj);
+static VALUE cState_initialize(int argc, VALUE *argv, VALUE self);
+static VALUE cState_from_state_s(VALUE self, VALUE opts);
+static VALUE cState_indent(VALUE self);
+static VALUE cState_indent_set(VALUE self, VALUE indent);
+static VALUE cState_space(VALUE self);
+static VALUE cState_space_set(VALUE self, VALUE space);
+static VALUE cState_space_before(VALUE self);
+static VALUE cState_space_before_set(VALUE self, VALUE space_before);
+static VALUE cState_object_nl(VALUE self);
+static VALUE cState_object_nl_set(VALUE self, VALUE object_nl);
+static VALUE cState_array_nl(VALUE self);
+static VALUE cState_array_nl_set(VALUE self, VALUE array_nl);
+static VALUE cState_max_nesting(VALUE self);
+static VALUE cState_max_nesting_set(VALUE self, VALUE depth);
+static VALUE cState_allow_nan_p(VALUE self);
+static VALUE cState_ascii_only_p(VALUE self);
+static VALUE cState_depth(VALUE self);
+static VALUE cState_depth_set(VALUE self, VALUE depth);
+static FBuffer *cState_prepare_buffer(VALUE self);
+#ifndef ZALLOC
+#define ZALLOC(type) ((type *)ruby_zalloc(sizeof(type)))
+static inline void *ruby_zalloc(size_t n)
+{
+ void *p = ruby_xmalloc(n);
+ memset(p, 0, n);
+ return p;
+}
+#endif
+#ifdef TypedData_Wrap_Struct
+static const rb_data_type_t JSON_Generator_State_type;
+#define NEW_TYPEDDATA_WRAPPER 1
+#else
+#define TypedData_Wrap_Struct(klass, ignore, json) Data_Wrap_Struct(klass, NULL, State_free, json)
+#define TypedData_Get_Struct(self, JSON_Generator_State, ignore, json) Data_Get_Struct(self, JSON_Generator_State, json)
+#endif
+
+#endif
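The JSON_Generator_State struct above mirrors the options exposed by the Ruby-level State class (indent, space, space_before, object_nl, array_nl, max_nesting, allow_nan, ascii_only, quirks_mode, depth, buffer_initial_length), and GENERATE_JSON funnels every to_json call through such a state. A minimal sketch of setting those fields from Ruby, assuming the JSON::State constant wired up in json/common.rb below:

    require 'json'

    # These options populate the corresponding struct fields above.
    state = JSON::State.new(:indent => '  ', :space => ' ',
                            :object_nl => "\n", :array_nl => "\n")
    puts state.generate('a' => [1, 2])   # pretty-printed, multi-line JSON object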
diff --git a/ext/json/lib/json.rb b/ext/json/lib/json.rb
new file mode 100644
index 0000000000..24aa385c91
--- /dev/null
+++ b/ext/json/lib/json.rb
@@ -0,0 +1,62 @@
+require 'json/common'
+
+##
+# = JavaScript Object Notation (JSON)
+#
+# JSON is a lightweight data-interchange format. It is easy for humans
+# to read and write, and equally simple for machines to generate or parse.
+# JSON is completely language agnostic, making it an ideal interchange format.
+#
+# Built on two universally available structures:
+# 1. A collection of name/value pairs. Often referred to as an _object_, hash table, record, struct, keyed list, or associative array.
+# 2. An ordered list of values. More commonly called an _array_, vector, sequence or list.
+#
+# To read more about JSON visit: http://json.org
+#
+# == Parsing JSON
+#
+# To parse a JSON string received by another application or generated within
+# your existing application:
+#
+# require 'json'
+#
+# my_hash = JSON.parse('{"hello": "goodbye"}')
+# puts my_hash["hello"] => "goodbye"
+#
+# Notice the extra quotes <tt>''</tt> around the hash notation. Ruby expects
+# the argument to be a string and can't convert objects like a hash or array.
+#
+# JSON.parse then converts the string into a Ruby hash.
+#
+# == Generating JSON
+#
+# Creating a JSON string for communication or serialization is
+# just as simple.
+#
+# require 'json'
+#
+# my_hash = {:hello => "goodbye"}
+# puts JSON.generate(my_hash) => "{\"hello\":\"goodbye\"}"
+#
+# Or an alternative way:
+#
+# require 'json'
+# puts {:hello => "goodbye"}.to_json => "{\"hello\":\"goodbye\"}"
+#
+# <tt>JSON.generate</tt> only allows objects or arrays to be converted
+# to JSON syntax. <tt>to_json</tt>, however, accepts many Ruby classes
+# even though it acts only as a method for serialization:
+#
+# require 'json'
+#
+# 1.to_json => "1"
+#
+module JSON
+ require 'json/version'
+
+ begin
+ require 'json/ext'
+ rescue LoadError
+ require 'json/pure'
+ end
+end
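The begin/rescue above silently falls back to the pure-Ruby implementation when the C extension cannot be loaded. A quick way to see which backend is active, assuming a standard build:

    require 'json'

    JSON.parser      # => JSON::Ext::Parser    (C extension in use)
    JSON.generator   # => JSON::Ext::Generator (JSON::Pure::* if the fallback was taken)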
diff --git a/ext/json/lib/json/add/bigdecimal.rb b/ext/json/lib/json/add/bigdecimal.rb
new file mode 100644
index 0000000000..0ef69f12e0
--- /dev/null
+++ b/ext/json/lib/json/add/bigdecimal.rb
@@ -0,0 +1,28 @@
+unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED
+ require 'json'
+end
+defined?(::BigDecimal) or require 'bigdecimal'
+
+class BigDecimal
+ # Import a JSON Marshalled object.
+ #
+ # method used for JSON marshalling support.
+ def self.json_create(object)
+ BigDecimal._load object['b']
+ end
+
+ # Marshal the object to JSON.
+ #
+ # method used for JSON marshalling support.
+ def as_json(*)
+ {
+ JSON.create_id => self.class.name,
+ 'b' => _dump,
+ }
+ end
+
+ # return the JSON value
+ def to_json(*)
+ as_json.to_json
+ end
+end
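The 'b' entry above carries whatever BigDecimal._dump produces, so json_create only has to feed it back to BigDecimal._load. A minimal round-trip sketch (the exact dump string is an implementation detail of bigdecimal):

    require 'json/add/bigdecimal'

    json = BigDecimal('1.23').to_json
    # => a JSON object with "json_class" => "BigDecimal" and the "b" dump string
    JSON.parse(json, :create_additions => true)   # => BigDecimal equal to 1.23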
diff --git a/ext/json/lib/json/add/complex.rb b/ext/json/lib/json/add/complex.rb
new file mode 100644
index 0000000000..3d653bb50d
--- /dev/null
+++ b/ext/json/lib/json/add/complex.rb
@@ -0,0 +1,28 @@
+unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED
+ require 'json'
+end
+defined?(::Complex) or require 'complex'
+
+class Complex
+
+ # Deserializes a JSON string by converting the real value <tt>r</tt> and
+ # imaginary value <tt>i</tt> to a Complex object.
+ def self.json_create(object)
+ Complex(object['r'], object['i'])
+ end
+
+ # Returns a hash, that will be turned into a JSON object and represent this
+ # object.
+ def as_json(*)
+ {
+ JSON.create_id => self.class.name,
+ 'r' => real,
+ 'i' => imag,
+ }
+ end
+
+ # Stores class name (Complex) along with real value <tt>r</tt> and imaginary value <tt>i</tt> as JSON string
+ def to_json(*)
+ as_json.to_json
+ end
+end
diff --git a/ext/json/lib/json/add/core.rb b/ext/json/lib/json/add/core.rb
new file mode 100644
index 0000000000..77d9dc0b20
--- /dev/null
+++ b/ext/json/lib/json/add/core.rb
@@ -0,0 +1,11 @@
+# This file requires the implementations of ruby core's custom objects for
+# serialisation/deserialisation.
+
+require 'json/add/date'
+require 'json/add/date_time'
+require 'json/add/exception'
+require 'json/add/range'
+require 'json/add/regexp'
+require 'json/add/struct'
+require 'json/add/symbol'
+require 'json/add/time'
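Requiring json/add/core wires up as_json/to_json/json_create for all of the classes listed above, so any of them can round trip through JSON once create_additions is enabled at parse time. A minimal sketch using Range:

    require 'json/add/core'

    json = (1..5).to_json
    # => "{\"json_class\":\"Range\",\"a\":[1,5,false]}"
    JSON.parse(json, :create_additions => true)   # => 1..5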
diff --git a/ext/json/lib/json/add/date.rb b/ext/json/lib/json/add/date.rb
new file mode 100644
index 0000000000..4288237db1
--- /dev/null
+++ b/ext/json/lib/json/add/date.rb
@@ -0,0 +1,34 @@
+unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED
+ require 'json'
+end
+require 'date'
+
+# Date serialization/deserialization
+class Date
+
+ # Deserializes JSON string by converting Julian year <tt>y</tt>, month
+ # <tt>m</tt>, day <tt>d</tt> and Day of Calendar Reform <tt>sg</tt> to Date.
+ def self.json_create(object)
+ civil(*object.values_at('y', 'm', 'd', 'sg'))
+ end
+
+ alias start sg unless method_defined?(:start)
+
+ # Returns a hash, that will be turned into a JSON object and represent this
+ # object.
+ def as_json(*)
+ {
+ JSON.create_id => self.class.name,
+ 'y' => year,
+ 'm' => month,
+ 'd' => day,
+ 'sg' => start,
+ }
+ end
+
+ # Stores class name (Date) with Julian year <tt>y</tt>, month <tt>m</tt>, day
+ # <tt>d</tt> and Day of Calendar Reform <tt>sg</tt> as JSON string
+ def to_json(*args)
+ as_json.to_json(*args)
+ end
+end
diff --git a/ext/json/lib/json/add/date_time.rb b/ext/json/lib/json/add/date_time.rb
new file mode 100644
index 0000000000..5ea42ea656
--- /dev/null
+++ b/ext/json/lib/json/add/date_time.rb
@@ -0,0 +1,50 @@
+unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED
+ require 'json'
+end
+require 'date'
+
+# DateTime serialization/deserialization
+class DateTime
+
+ # Deserializes JSON string by converting year <tt>y</tt>, month <tt>m</tt>,
+ # day <tt>d</tt>, hour <tt>H</tt>, minute <tt>M</tt>, second <tt>S</tt>,
+ # offset <tt>of</tt> and Day of Calendar Reform <tt>sg</tt> to DateTime.
+ def self.json_create(object)
+ args = object.values_at('y', 'm', 'd', 'H', 'M', 'S')
+ of_a, of_b = object['of'].split('/')
+ if of_b and of_b != '0'
+ args << Rational(of_a.to_i, of_b.to_i)
+ else
+ args << of_a
+ end
+ args << object['sg']
+ civil(*args)
+ end
+
+ alias start sg unless method_defined?(:start)
+
+ # Returns a hash, that will be turned into a JSON object and represent this
+ # object.
+ def as_json(*)
+ {
+ JSON.create_id => self.class.name,
+ 'y' => year,
+ 'm' => month,
+ 'd' => day,
+ 'H' => hour,
+ 'M' => min,
+ 'S' => sec,
+ 'of' => offset.to_s,
+ 'sg' => start,
+ }
+ end
+
+ # Stores class name (DateTime) with Julian year <tt>y</tt>, month <tt>m</tt>,
+ # day <tt>d</tt>, hour <tt>H</tt>, minute <tt>M</tt>, second <tt>S</tt>,
+ # offset <tt>of</tt> and Day of Calendar Reform <tt>sg</tt> as JSON string
+ def to_json(*args)
+ as_json.to_json(*args)
+ end
+end
+
+
diff --git a/ext/json/lib/json/add/exception.rb b/ext/json/lib/json/add/exception.rb
new file mode 100644
index 0000000000..e6ad257abf
--- /dev/null
+++ b/ext/json/lib/json/add/exception.rb
@@ -0,0 +1,31 @@
+unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED
+ require 'json'
+end
+
+# Exception serialization/deserialization
+class Exception
+
+ # Deserializes JSON string by constructing new Exception object with message
+ # <tt>m</tt> and backtrace <tt>b</tt> serialized with <tt>to_json</tt>
+ def self.json_create(object)
+ result = new(object['m'])
+ result.set_backtrace object['b']
+ result
+ end
+
+ # Returns a hash, that will be turned into a JSON object and represent this
+ # object.
+ def as_json(*)
+ {
+ JSON.create_id => self.class.name,
+ 'm' => message,
+ 'b' => backtrace,
+ }
+ end
+
+ # Stores class name (Exception) with message <tt>m</tt> and backtrace array
+ # <tt>b</tt> as JSON string
+ def to_json(*args)
+ as_json.to_json(*args)
+ end
+end
diff --git a/ext/json/lib/json/add/ostruct.rb b/ext/json/lib/json/add/ostruct.rb
new file mode 100644
index 0000000000..da81e107a7
--- /dev/null
+++ b/ext/json/lib/json/add/ostruct.rb
@@ -0,0 +1,31 @@
+unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED
+ require 'json'
+end
+require 'ostruct'
+
+# OpenStruct serialization/deserialization
+class OpenStruct
+
+ # Deserializes a JSON string by constructing a new OpenStruct object from the
+ # table <tt>t</tt> serialized by <tt>to_json</tt>.
+ def self.json_create(object)
+ new(object['t'] || object[:t])
+ end
+
+ # Returns a hash, that will be turned into a JSON object and represent this
+ # object.
+ def as_json(*)
+ klass = self.class.name
+ klass.to_s.empty? and raise JSON::JSONError, "Only named structs are supported!"
+ {
+ JSON.create_id => klass,
+ 't' => table,
+ }
+ end
+
+ # Stores class name (OpenStruct) with this struct's table <tt>t</tt> as a
+ # JSON string.
+ def to_json(*args)
+ as_json.to_json(*args)
+ end
+end
diff --git a/ext/json/lib/json/add/range.rb b/ext/json/lib/json/add/range.rb
new file mode 100644
index 0000000000..e61e553cdb
--- /dev/null
+++ b/ext/json/lib/json/add/range.rb
@@ -0,0 +1,29 @@
+unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED
+ require 'json'
+end
+
+# Range serialization/deserialization
+class Range
+
+ # Deserializes JSON string by constructing new Range object with arguments
+ # <tt>a</tt> serialized by <tt>to_json</tt>.
+ def self.json_create(object)
+ new(*object['a'])
+ end
+
+ # Returns a hash, that will be turned into a JSON object and represent this
+ # object.
+ def as_json(*)
+ {
+ JSON.create_id => self.class.name,
+ 'a' => [ first, last, exclude_end? ]
+ }
+ end
+
+ # Stores class name (Range) with JSON array of arguments <tt>a</tt> which
+ # include <tt>first</tt> (integer), <tt>last</tt> (integer), and
+ # <tt>exclude_end?</tt> (boolean) as JSON string.
+ def to_json(*args)
+ as_json.to_json(*args)
+ end
+end
diff --git a/ext/json/lib/json/add/rational.rb b/ext/json/lib/json/add/rational.rb
new file mode 100644
index 0000000000..ee39c20e8d
--- /dev/null
+++ b/ext/json/lib/json/add/rational.rb
@@ -0,0 +1,27 @@
+unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED
+ require 'json'
+end
+defined?(::Rational) or require 'rational'
+
+class Rational
+ # Deserializes a JSON string by converting the numerator value <tt>n</tt> and
+ # denominator value <tt>d</tt> to a Rational object.
+ def self.json_create(object)
+ Rational(object['n'], object['d'])
+ end
+
+ # Returns a hash, that will be turned into a JSON object and represent this
+ # object.
+ def as_json(*)
+ {
+ JSON.create_id => self.class.name,
+ 'n' => numerator,
+ 'd' => denominator,
+ }
+ end
+
+ # Stores class name (Rational) along with numerator value <tt>n</tt> and denominator value <tt>d</tt> as JSON string
+ def to_json(*)
+ as_json.to_json
+ end
+end
diff --git a/ext/json/lib/json/add/regexp.rb b/ext/json/lib/json/add/regexp.rb
new file mode 100644
index 0000000000..2fcbb6fb14
--- /dev/null
+++ b/ext/json/lib/json/add/regexp.rb
@@ -0,0 +1,30 @@
+unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED
+ require 'json'
+end
+
+# Regexp serialization/deserialization
+class Regexp
+
+ # Deserializes JSON string by constructing new Regexp object with source
+ # <tt>s</tt> (Regexp or String) and options <tt>o</tt> serialized by
+ # <tt>to_json</tt>
+ def self.json_create(object)
+ new(object['s'], object['o'])
+ end
+
+ # Returns a hash, that will be turned into a JSON object and represent this
+ # object.
+ def as_json(*)
+ {
+ JSON.create_id => self.class.name,
+ 'o' => options,
+ 's' => source,
+ }
+ end
+
+ # Stores class name (Regexp) with options <tt>o</tt> and source <tt>s</tt>
+ # (Regexp or String) as JSON string
+ def to_json(*)
+ as_json.to_json
+ end
+end
diff --git a/ext/json/lib/json/add/struct.rb b/ext/json/lib/json/add/struct.rb
new file mode 100644
index 0000000000..6847cde99b
--- /dev/null
+++ b/ext/json/lib/json/add/struct.rb
@@ -0,0 +1,30 @@
+unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED
+ require 'json'
+end
+
+# Struct serialization/deserialization
+class Struct
+
+ # Deserializes JSON string by constructing new Struct object with values
+ # <tt>v</tt> serialized by <tt>to_json</tt>.
+ def self.json_create(object)
+ new(*object['v'])
+ end
+
+ # Returns a hash, that will be turned into a JSON object and represent this
+ # object.
+ def as_json(*)
+ klass = self.class.name
+ klass.to_s.empty? and raise JSON::JSONError, "Only named structs are supported!"
+ {
+ JSON.create_id => klass,
+ 'v' => values,
+ }
+ end
+
+ # Stores class name (Struct) with Struct values <tt>v</tt> as a JSON string.
+ # Only named structs are supported.
+ def to_json(*args)
+ as_json.to_json(*args)
+ end
+end
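Only named Struct subclasses survive the round trip, because as_json stores the class name and the parser later resolves it with JSON.deep_const_get. A minimal sketch (Point is just an illustrative struct):

    require 'json/add/struct'

    Point = Struct.new(:x, :y)
    json  = Point.new(1, 2).to_json               # stores "json_class" => "Point", "v" => [1, 2]
    JSON.parse(json, :create_additions => true)   # => #<struct Point x=1, y=2>

    Struct.new(:x).new(1).to_json                 # anonymous struct: raises JSON::JSONError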
diff --git a/ext/json/lib/json/add/symbol.rb b/ext/json/lib/json/add/symbol.rb
new file mode 100644
index 0000000000..03dc9a56a5
--- /dev/null
+++ b/ext/json/lib/json/add/symbol.rb
@@ -0,0 +1,25 @@
+unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED
+ require 'json'
+end
+
+# Symbol serialization/deserialization
+class Symbol
+ # Returns a hash, that will be turned into a JSON object and represent this
+ # object.
+ def as_json(*)
+ {
+ JSON.create_id => self.class.name,
+ 's' => to_s,
+ }
+ end
+
+ # Stores class name (Symbol) with String representation of Symbol as a JSON string.
+ def to_json(*a)
+ as_json.to_json(*a)
+ end
+
+ # Deserializes JSON string by converting the <tt>string</tt> value stored in the object to a Symbol
+ def self.json_create(o)
+ o['s'].to_sym
+ end
+end
diff --git a/ext/json/lib/json/add/time.rb b/ext/json/lib/json/add/time.rb
new file mode 100644
index 0000000000..d9834677ac
--- /dev/null
+++ b/ext/json/lib/json/add/time.rb
@@ -0,0 +1,38 @@
+unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED
+ require 'json'
+end
+
+# Time serialization/deserialization
+class Time
+
+ # Deserializes JSON string by converting time since epoch to Time
+ def self.json_create(object)
+ if usec = object.delete('u') # used to be tv_usec -> tv_nsec
+ object['n'] = usec * 1000
+ end
+ if method_defined?(:tv_nsec)
+ at(object['s'], Rational(object['n'], 1000))
+ else
+ at(object['s'], object['n'] / 1000)
+ end
+ end
+
+ # Returns a hash, that will be turned into a JSON object and represent this
+ # object.
+ def as_json(*)
+ nanoseconds = [ tv_usec * 1000 ]
+ respond_to?(:tv_nsec) and nanoseconds << tv_nsec
+ nanoseconds = nanoseconds.max
+ {
+ JSON.create_id => self.class.name,
+ 's' => tv_sec,
+ 'n' => nanoseconds,
+ }
+ end
+
+ # Stores class name (Time) with number of seconds since epoch and number of
+ # microseconds for Time as JSON string
+ def to_json(*args)
+ as_json.to_json(*args)
+ end
+end
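The legacy 'u' (microseconds) key is upgraded to 'n' (nanoseconds) on load, so sub-second precision survives a round trip on nanosecond-capable Rubies. A minimal sketch with an arbitrary timestamp:

    require 'json/add/time'

    t = Time.at(1428828997, 123456)               # 123456 microseconds
    restored = JSON.parse(t.to_json, :create_additions => true)
    restored == t                                 # => true, sub-second part preserved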
diff --git a/ext/json/lib/json/common.rb b/ext/json/lib/json/common.rb
new file mode 100644
index 0000000000..f44184e138
--- /dev/null
+++ b/ext/json/lib/json/common.rb
@@ -0,0 +1,484 @@
+require 'json/version'
+require 'json/generic_object'
+
+module JSON
+ class << self
+ # If _object_ is string-like, parse the string and return the parsed result
+ # as a Ruby data structure. Otherwise generate a JSON text from the Ruby
+ # data structure object and return it.
+ #
+ # The _opts_ argument is passed through to generate/parse respectively. See
+ # generate and parse for their documentation.
+ def [](object, opts = {})
+ if object.respond_to? :to_str
+ JSON.parse(object.to_str, opts)
+ else
+ JSON.generate(object, opts)
+ end
+ end
+
+ # Returns the JSON parser class that is used by JSON. This is either
+ # JSON::Ext::Parser or JSON::Pure::Parser.
+ attr_reader :parser
+
+ # Set the JSON parser class _parser_ to be used by JSON.
+ def parser=(parser) # :nodoc:
+ @parser = parser
+ remove_const :Parser if JSON.const_defined_in?(self, :Parser)
+ const_set :Parser, parser
+ end
+
+ # Return the constant located at _path_. The format of _path_ has to be
+ # either ::A::B::C or A::B::C. In either case, A has to be located at the top
+ # level (an absolute namespace path). If no constant exists at the given
+ # path, an ArgumentError is raised.
+ def deep_const_get(path) # :nodoc:
+ path.to_s.split(/::/).inject(Object) do |p, c|
+ case
+ when c.empty? then p
+ when JSON.const_defined_in?(p, c) then p.const_get(c)
+ else
+ begin
+ p.const_missing(c)
+ rescue NameError => e
+ raise ArgumentError, "can't get const #{path}: #{e}"
+ end
+ end
+ end
+ end
+
+ # Set the module _generator_ to be used by JSON.
+ def generator=(generator) # :nodoc:
+ old, $VERBOSE = $VERBOSE, nil
+ @generator = generator
+ generator_methods = generator::GeneratorMethods
+ for const in generator_methods.constants
+ klass = deep_const_get(const)
+ modul = generator_methods.const_get(const)
+ klass.class_eval do
+ instance_methods(false).each do |m|
+ m.to_s == 'to_json' and remove_method m
+ end
+ include modul
+ end
+ end
+ self.state = generator::State
+ const_set :State, self.state
+ const_set :SAFE_STATE_PROTOTYPE, State.new
+ const_set :FAST_STATE_PROTOTYPE, State.new(
+ :indent => '',
+ :space => '',
+ :object_nl => "",
+ :array_nl => "",
+ :max_nesting => false
+ )
+ const_set :PRETTY_STATE_PROTOTYPE, State.new(
+ :indent => ' ',
+ :space => ' ',
+ :object_nl => "\n",
+ :array_nl => "\n"
+ )
+ ensure
+ $VERBOSE = old
+ end
+
+ # Returns the JSON generator module that is used by JSON. This is
+ # either JSON::Ext::Generator or JSON::Pure::Generator.
+ attr_reader :generator
+
+ # Returns the JSON generator state class that is used by JSON. This is
+ # either JSON::Ext::Generator::State or JSON::Pure::Generator::State.
+ attr_accessor :state
+
+ # This is the create identifier, which is used to decide whether the
+ # _json_create_ hook of a class should be called. It defaults to 'json_class'.
+ attr_accessor :create_id
+ end
+ self.create_id = 'json_class'
+
+ NaN = 0.0/0
+
+ Infinity = 1.0/0
+
+ MinusInfinity = -Infinity
+
+ # The base exception for JSON errors.
+ class JSONError < StandardError
+ def self.wrap(exception)
+ obj = new("Wrapped(#{exception.class}): #{exception.message.inspect}")
+ obj.set_backtrace exception.backtrace
+ obj
+ end
+ end
+
+ # This exception is raised if a parser error occurs.
+ class ParserError < JSONError; end
+
+ # This exception is raised if the nesting of parsed data structures is too
+ # deep.
+ class NestingError < ParserError; end
+
+ # :stopdoc:
+ class CircularDatastructure < NestingError; end
+ # :startdoc:
+
+ # This exception is raised if a generator or unparser error occurs.
+ class GeneratorError < JSONError; end
+ # For backwards compatibility
+ UnparserError = GeneratorError
+
+ # This exception is raised if the required unicode support is missing on the
+ # system. Usually this means that the iconv library is not installed.
+ class MissingUnicodeSupport < JSONError; end
+
+ module_function
+
+ # Parse the JSON document _source_ into a Ruby data structure and return it.
+ #
+ # _opts_ can have the following
+ # keys:
+ # * *max_nesting*: The maximum depth of nesting allowed in the parsed data
+ # structures. Disable depth checking with :max_nesting => false. It defaults
+ # to 100.
+ # * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in
+ # defiance of RFC 4627 to be parsed by the Parser. This option defaults
+ # to false.
+ # * *symbolize_names*: If set to true, returns symbols for the names
+ # (keys) in a JSON object. Otherwise strings are returned. Strings are
+ # the default.
+ # * *create_additions*: If set to false, the Parser doesn't create
+ # additions even if a matching class and create_id were found. This option
+ # defaults to false.
+ # * *object_class*: Defaults to Hash
+ # * *array_class*: Defaults to Array
+ def parse(source, opts = {})
+ Parser.new(source, opts).parse
+ end
+
+ # Parse the JSON document _source_ into a Ruby data structure and return it.
+ # The bang version of the parse method defaults to the more dangerous values
+ # for the _opts_ hash, so be sure only to parse trusted _source_ documents.
+ #
+ # _opts_ can have the following keys:
+ # * *max_nesting*: The maximum depth of nesting allowed in the parsed data
+ # structures. Enable depth checking with :max_nesting => anInteger. The parse!
+ # method defaults to not doing max depth checking: this can be dangerous
+ # if someone wants to fill up your stack.
+ # * *allow_nan*: If set to true, allow NaN, Infinity, and -Infinity in
+ # defiance of RFC 4627 to be parsed by the Parser. This option defaults
+ # to true.
+ # * *create_additions*: If set to false, the Parser doesn't create
+ # additions even if a matching class and create_id were found. This option
+ # defaults to false.
+ def parse!(source, opts = {})
+ opts = {
+ :max_nesting => false,
+ :allow_nan => true
+ }.update(opts)
+ Parser.new(source, opts).parse
+ end
+
+ # Generate a JSON document from the Ruby data structure _obj_ and return
+ # it. _state_ is
+ # * a JSON::State object,
+ # * or a Hash like object (responding to to_hash),
+ # * an object convertible into a hash by a to_h method,
+ # that is used as or to configure a State object.
+ #
+ # It defaults to a state object, that creates the shortest possible JSON text
+ # in one line, checks for circular data structures and doesn't allow NaN,
+ # Infinity, and -Infinity.
+ #
+ # A _state_ hash can have the following keys:
+ # * *indent*: a string used to indent levels (default: ''),
+ # * *space*: a string that is put after a : or , delimiter (default: ''),
+ # * *space_before*: a string that is put before a : pair delimiter (default: ''),
+ # * *object_nl*: a string that is put at the end of a JSON object (default: ''),
+ # * *array_nl*: a string that is put at the end of a JSON array (default: ''),
+ # * *allow_nan*: true if NaN, Infinity, and -Infinity should be
+ # generated, otherwise an exception is thrown if these values are
+ # encountered. This option defaults to false.
+ # * *max_nesting*: The maximum depth of nesting allowed in the data
+ # structures from which JSON is to be generated. Disable depth checking
+ # with :max_nesting => false; it defaults to 100.
+ #
+ # See also the fast_generate method for the fastest creation with the least
+ # amount of sanity checks, and the pretty_generate method for some nicer
+ # defaults for pretty output.
+ def generate(obj, opts = nil)
+ if State === opts
+ state, opts = opts, nil
+ else
+ state = SAFE_STATE_PROTOTYPE.dup
+ end
+ if opts
+ if opts.respond_to? :to_hash
+ opts = opts.to_hash
+ elsif opts.respond_to? :to_h
+ opts = opts.to_h
+ else
+ raise TypeError, "can't convert #{opts.class} into Hash"
+ end
+ state = state.configure(opts)
+ end
+ state.generate(obj)
+ end
+
+ # :stopdoc:
+ # I want to deprecate these later, so I'll first be silent about them, and
+ # later delete them.
+ alias unparse generate
+ module_function :unparse
+ # :startdoc:
+
+ # Generate a JSON document from the Ruby data structure _obj_ and return it.
+ # This method disables the checks for circles in Ruby objects.
+ #
+ # *WARNING*: Be careful not to pass any Ruby data structures with circles as
+ # _obj_ argument because this will cause JSON to go into an infinite loop.
+ def fast_generate(obj, opts = nil)
+ if State === opts
+ state, opts = opts, nil
+ else
+ state = FAST_STATE_PROTOTYPE.dup
+ end
+ if opts
+ if opts.respond_to? :to_hash
+ opts = opts.to_hash
+ elsif opts.respond_to? :to_h
+ opts = opts.to_h
+ else
+ raise TypeError, "can't convert #{opts.class} into Hash"
+ end
+ state.configure(opts)
+ end
+ state.generate(obj)
+ end
+
+ # :stopdoc:
+ # I want to deprecate these later, so I'll first be silent about them, and later delete them.
+ alias fast_unparse fast_generate
+ module_function :fast_unparse
+ # :startdoc:
+
+ # Generate a JSON document from the Ruby data structure _obj_ and return it.
+ # The returned document is a prettier form of the document returned by
+ # #unparse.
+ #
+ # The _opts_ argument can be used to configure the generator. See the
+ # generate method for a more detailed explanation.
+ def pretty_generate(obj, opts = nil)
+ if State === opts
+ state, opts = opts, nil
+ else
+ state = PRETTY_STATE_PROTOTYPE.dup
+ end
+ if opts
+ if opts.respond_to? :to_hash
+ opts = opts.to_hash
+ elsif opts.respond_to? :to_h
+ opts = opts.to_h
+ else
+ raise TypeError, "can't convert #{opts.class} into Hash"
+ end
+ state.configure(opts)
+ end
+ state.generate(obj)
+ end
+
+ # :stopdoc:
+ # I want to deprecate these later, so I'll first be silent about them, and later delete them.
+ alias pretty_unparse pretty_generate
+ module_function :pretty_unparse
+ # :startdoc:
+
+ class << self
+ # The global default options for the JSON.load method:
+ # :max_nesting: false
+ # :allow_nan: true
+ # :quirks_mode: true
+ # :create_additions: true
+ attr_accessor :load_default_options
+ end
+ self.load_default_options = {
+ :max_nesting => false,
+ :allow_nan => true,
+ :quirks_mode => true,
+ :create_additions => true,
+ }
+
+ # Load a Ruby data structure from a JSON _source_ and return it. A source can
+ # either be a string-like object, an IO-like object, or an object responding
+ # to the read method. If _proc_ is given, it will be called with any nested
+ # Ruby object as an argument recursively, in depth-first order. To modify the
+ # default options, pass in the optional _options_ argument as well.
+ #
+ # BEWARE: This method is meant to load data from trusted sources, like your
+ # own database server or clients under your control; it could be dangerous
+ # to allow untrusted users to pass JSON sources into it. The default options
+ # for the parser can be changed via the load_default_options method.
+ #
+ # This method is part of the implementation of the load/dump interface of
+ # Marshal and YAML.
+ def load(source, proc = nil, options = {})
+ opts = load_default_options.merge options
+ if source.respond_to? :to_str
+ source = source.to_str
+ elsif source.respond_to? :to_io
+ source = source.to_io.read
+ elsif source.respond_to?(:read)
+ source = source.read
+ end
+ if opts[:quirks_mode] && (source.nil? || source.empty?)
+ source = 'null'
+ end
+ result = parse(source, opts)
+ recurse_proc(result, &proc) if proc
+ result
+ end
+
+ # Recursively calls the passed _Proc_ if the parsed data structure is an _Array_ or a _Hash_.
+ def recurse_proc(result, &proc)
+ case result
+ when Array
+ result.each { |x| recurse_proc x, &proc }
+ proc.call result
+ when Hash
+ result.each { |x, y| recurse_proc x, &proc; recurse_proc y, &proc }
+ proc.call result
+ else
+ proc.call result
+ end
+ end
+
+ alias restore load
+ module_function :restore
+
+ class << self
+ # The global default options for the JSON.dump method:
+ # :max_nesting: false
+ # :allow_nan: true
+ # :quirks_mode: true
+ attr_accessor :dump_default_options
+ end
+ self.dump_default_options = {
+ :max_nesting => false,
+ :allow_nan => true,
+ :quirks_mode => true,
+ }
+
+ # Dumps _obj_ as a JSON string, i.e. calls generate on the object and returns
+ # the result.
+ #
+ # If anIO (an IO-like object or an object that responds to the write method)
+ # was given, the resulting JSON is written to it.
+ #
+ # If the number of nested arrays or objects exceeds _limit_, an ArgumentError
+ # exception is raised. This argument is similar (but not exactly the
+ # same!) to the _limit_ argument in Marshal.dump.
+ #
+ # The default options for the generator can be changed via the
+ # dump_default_options method.
+ #
+ # This method is part of the implementation of the load/dump interface of
+ # Marshal and YAML.
+ def dump(obj, anIO = nil, limit = nil)
+ if anIO and limit.nil?
+ anIO = anIO.to_io if anIO.respond_to?(:to_io)
+ unless anIO.respond_to?(:write)
+ limit = anIO
+ anIO = nil
+ end
+ end
+ opts = JSON.dump_default_options
+ opts = opts.merge(:max_nesting => limit) if limit
+ result = generate(obj, opts)
+ if anIO
+ anIO.write result
+ anIO
+ else
+ result
+ end
+ rescue JSON::NestingError
+ raise ArgumentError, "exceed depth limit"
+ end
+
+ # Swap consecutive bytes of _string_ in place.
+ def self.swap!(string) # :nodoc:
+ 0.upto(string.size / 2) do |i|
+ break unless string[2 * i + 1]
+ string[2 * i], string[2 * i + 1] = string[2 * i + 1], string[2 * i]
+ end
+ string
+ end
+
+ # Shortcut for iconv.
+ if ::String.method_defined?(:encode)
+ # Encodes string using Ruby's _String.encode_
+ def self.iconv(to, from, string)
+ string.encode(to, from)
+ end
+ else
+ require 'iconv'
+ # Encodes string using _iconv_ library
+ def self.iconv(to, from, string)
+ Iconv.conv(to, from, string)
+ end
+ end
+
+ if ::Object.method(:const_defined?).arity == 1
+ def self.const_defined_in?(modul, constant)
+ modul.const_defined?(constant)
+ end
+ else
+ def self.const_defined_in?(modul, constant)
+ modul.const_defined?(constant, false)
+ end
+ end
+end
+
+module ::Kernel
+ private
+
+ # Outputs _objs_ to STDOUT as JSON strings in the shortest form, that is in
+ # one line.
+ def j(*objs)
+ objs.each do |obj|
+ puts JSON::generate(obj, :allow_nan => true, :max_nesting => false)
+ end
+ nil
+ end
+
+ # Outputs _objs_ to STDOUT as JSON strings in a pretty format, with
+ # indentation and over many lines.
+ def jj(*objs)
+ objs.each do |obj|
+ puts JSON::pretty_generate(obj, :allow_nan => true, :max_nesting => false)
+ end
+ nil
+ end
+
+ # If _object_ is string-like, parse the string and return the parsed result as
+ # a Ruby data structure. Otherwise, generate a JSON text from the Ruby data
+ # structure object and return it.
+ #
+ # The _opts_ argument is passed through to generate/parse respectively. See
+ # generate and parse for their documentation.
+ def JSON(object, *args)
+ if object.respond_to? :to_str
+ JSON.parse(object.to_str, args.first)
+ else
+ JSON.generate(object, args.first)
+ end
+ end
+end
+
+# Extends any Class to include the _json_creatable?_ method.
+class ::Class
+ # Returns true if this class can be used to create an instance
+ # from a serialised JSON string. The class has to implement a class
+ # method _json_create_ that expects a hash as first parameter. The hash
+ # should include the required data.
+ def json_creatable?
+ respond_to?(:json_create)
+ end
+end
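The generate, pretty_generate, parse and dump entry points above all funnel through the same State and Parser options. A short tour under the defaults defined in this file (StringIO is only a stand-in for any IO-like object):

    require 'json'
    require 'stringio'

    obj = { 'list' => [1, 2, 3], 'ok' => true }

    JSON.generate(obj)                                    # shortest one-line form
    JSON.generate(obj, :indent => ' ', :object_nl => "\n",
                       :array_nl => "\n", :space => ' ')  # same keys as the _state_ hash above
    JSON.pretty_generate(obj)                             # ready-made pretty defaults

    JSON.parse('{"a":1}', :symbolize_names => true)       # => {:a=>1}

    io = StringIO.new
    JSON.dump(obj, io)                                    # writes the JSON text to io and returns io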
diff --git a/ext/json/lib/json/ext.rb b/ext/json/lib/json/ext.rb
new file mode 100644
index 0000000000..c5f813181d
--- /dev/null
+++ b/ext/json/lib/json/ext.rb
@@ -0,0 +1,21 @@
+if ENV['SIMPLECOV_COVERAGE'].to_i == 1
+ require 'simplecov'
+ SimpleCov.start do
+ add_filter "/tests/"
+ end
+end
+require 'json/common'
+
+module JSON
+ # This module holds all the modules/classes that implement JSON's
+ # functionality as C extensions.
+ module Ext
+ require 'json/ext/parser'
+ require 'json/ext/generator'
+ $DEBUG and warn "Using Ext extension for JSON."
+ JSON.parser = Parser
+ JSON.generator = Generator
+ end
+
+ JSON_LOADED = true unless defined?(::JSON::JSON_LOADED)
+end
diff --git a/ext/json/lib/json/generic_object.rb b/ext/json/lib/json/generic_object.rb
new file mode 100644
index 0000000000..3a14f0ea7d
--- /dev/null
+++ b/ext/json/lib/json/generic_object.rb
@@ -0,0 +1,62 @@
+require 'ostruct'
+
+module JSON
+ class GenericObject < OpenStruct
+ class << self
+ alias [] new
+
+ def json_creatable?
+ @json_creatable
+ end
+
+ attr_writer :json_creatable
+
+ def json_create(data)
+ data = data.dup
+ data.delete JSON.create_id
+ self[data]
+ end
+
+ def from_hash(object)
+ case
+ when object.respond_to?(:to_hash)
+ result = new
+ object.to_hash.each do |key, value|
+ result[key] = from_hash(value)
+ end
+ result
+ when object.respond_to?(:to_ary)
+ object.to_ary.map { |a| from_hash(a) }
+ else
+ object
+ end
+ end
+
+ def load(source, proc = nil, opts = {})
+ result = ::JSON.load(source, proc, opts.merge(:object_class => self))
+ result.nil? ? new : result
+ end
+
+ def dump(obj, *args)
+ ::JSON.dump(obj, *args)
+ end
+ end
+ self.json_creatable = false
+
+ def to_hash
+ table
+ end
+
+ def |(other)
+ self.class[other.to_hash.merge(to_hash)]
+ end
+
+ def as_json(*)
+ { JSON.create_id => self.class.name }.merge to_hash
+ end
+
+ def to_json(*a)
+ as_json.to_json(*a)
+ end
+ end
+end
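GenericObject gives parsed JSON objects OpenStruct-style accessors; the :object_class option is how load plugs it into the parser. A minimal sketch (attribute writing relies on OpenStruct#[]=, so this assumes a Ruby where OpenStruct supports it):

    require 'json'

    person = JSON::GenericObject.load('{"name":"Alice","age":30}')
    person.name       # => "Alice"
    person.to_hash    # => {:name=>"Alice", :age=>30}

    JSON::GenericObject.from_hash('a' => [{ 'b' => 1 }]).a.first.b   # => 1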
diff --git a/ext/json/lib/json/version.rb b/ext/json/lib/json/version.rb
new file mode 100644
index 0000000000..47cdcd607c
--- /dev/null
+++ b/ext/json/lib/json/version.rb
@@ -0,0 +1,8 @@
+module JSON
+ # JSON version
+ VERSION = '1.8.1'
+ VERSION_ARRAY = VERSION.split(/\./).map { |x| x.to_i } # :nodoc:
+ VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
+ VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
+ VERSION_BUILD = VERSION_ARRAY[2] # :nodoc:
+end
diff --git a/ext/json/parser/depend b/ext/json/parser/depend
new file mode 100644
index 0000000000..bc5db06404
--- /dev/null
+++ b/ext/json/parser/depend
@@ -0,0 +1,20 @@
+$(OBJS): $(ruby_headers)
+parser.o: parser.c parser.h $(srcdir)/../fbuffer/fbuffer.h
+
+# AUTOGENERATED DEPENDENCIES START
+parser.o: $(RUBY_EXTCONF_H)
+parser.o: $(arch_hdrdir)/ruby/config.h
+parser.o: $(hdrdir)/ruby/defines.h
+parser.o: $(hdrdir)/ruby/encoding.h
+parser.o: $(hdrdir)/ruby/intern.h
+parser.o: $(hdrdir)/ruby/missing.h
+parser.o: $(hdrdir)/ruby/oniguruma.h
+parser.o: $(hdrdir)/ruby/ruby.h
+parser.o: $(hdrdir)/ruby/st.h
+parser.o: $(hdrdir)/ruby/subst.h
+parser.o: $(top_srcdir)/ext/json/fbuffer/fbuffer.h
+parser.o: $(top_srcdir)/include/ruby.h
+parser.o: parser.c
+parser.o: parser.h
+parser.o: parser.rl
+# AUTOGENERATED DEPENDENCIES END
diff --git a/ext/json/parser/extconf.rb b/ext/json/parser/extconf.rb
new file mode 100644
index 0000000000..ae4f861c79
--- /dev/null
+++ b/ext/json/parser/extconf.rb
@@ -0,0 +1,3 @@
+require 'mkmf'
+
+create_makefile 'json/ext/parser'
diff --git a/ext/json/parser/parser.c b/ext/json/parser/parser.c
new file mode 100644
index 0000000000..eed58e5d39
--- /dev/null
+++ b/ext/json/parser/parser.c
@@ -0,0 +1,2222 @@
+
+#line 1 "parser.rl"
+#include "../fbuffer/fbuffer.h"
+#include "parser.h"
+
+/* unicode */
+
+static const char digit_values[256] = {
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1,
+ -1, -1, -1, -1, -1, -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1
+};
+
+static UTF32 unescape_unicode(const unsigned char *p)
+{
+ char b;
+ UTF32 result = 0;
+ b = digit_values[p[0]];
+ if (b < 0) return UNI_REPLACEMENT_CHAR;
+ result = (result << 4) | (unsigned char)b;
+ b = digit_values[p[1]];
+ if (b < 0) return UNI_REPLACEMENT_CHAR;
+ result = (result << 4) | (unsigned char)b;
+ b = digit_values[p[2]];
+ if (b < 0) return UNI_REPLACEMENT_CHAR;
+ result = (result << 4) | (unsigned char)b;
+ b = digit_values[p[3]];
+ if (b < 0) return UNI_REPLACEMENT_CHAR;
+ result = (result << 4) | (unsigned char)b;
+ return result;
+}
+
+static int convert_UTF32_to_UTF8(char *buf, UTF32 ch)
+{
+ int len = 1;
+ if (ch <= 0x7F) {
+ buf[0] = (char) ch;
+ } else if (ch <= 0x07FF) {
+ buf[0] = (char) ((ch >> 6) | 0xC0);
+ buf[1] = (char) ((ch & 0x3F) | 0x80);
+ len++;
+ } else if (ch <= 0xFFFF) {
+ buf[0] = (char) ((ch >> 12) | 0xE0);
+ buf[1] = (char) (((ch >> 6) & 0x3F) | 0x80);
+ buf[2] = (char) ((ch & 0x3F) | 0x80);
+ len += 2;
+ } else if (ch <= 0x1fffff) {
+ buf[0] =(char) ((ch >> 18) | 0xF0);
+ buf[1] =(char) (((ch >> 12) & 0x3F) | 0x80);
+ buf[2] =(char) (((ch >> 6) & 0x3F) | 0x80);
+ buf[3] =(char) ((ch & 0x3F) | 0x80);
+ len += 3;
+ } else {
+ buf[0] = '?';
+ }
+ return len;
+}
+
+#ifdef HAVE_RUBY_ENCODING_H
+static VALUE CEncoding_ASCII_8BIT, CEncoding_UTF_8, CEncoding_UTF_16BE,
+ CEncoding_UTF_16LE, CEncoding_UTF_32BE, CEncoding_UTF_32LE;
+static ID i_encoding, i_encode;
+#else
+static ID i_iconv;
+#endif
+
+static VALUE mJSON, mExt, cParser, eParserError, eNestingError;
+static VALUE CNaN, CInfinity, CMinusInfinity;
+
+static ID i_json_creatable_p, i_json_create, i_create_id, i_create_additions,
+ i_chr, i_max_nesting, i_allow_nan, i_symbolize_names, i_quirks_mode,
+ i_object_class, i_array_class, i_key_p, i_deep_const_get, i_match,
+ i_match_string, i_aset, i_aref, i_leftshift;
+
+
+#line 110 "parser.rl"
+
+
+
+#line 92 "parser.c"
+enum {JSON_object_start = 1};
+enum {JSON_object_first_final = 27};
+enum {JSON_object_error = 0};
+
+enum {JSON_object_en_main = 1};
+
+
+#line 151 "parser.rl"
+
+
+static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result)
+{
+ int cs = EVIL;
+ VALUE last_name = Qnil;
+ VALUE object_class = json->object_class;
+
+ if (json->max_nesting && json->current_nesting > json->max_nesting) {
+ rb_raise(eNestingError, "nesting of %d is too deep", json->current_nesting);
+ }
+
+ *result = NIL_P(object_class) ? rb_hash_new() : rb_class_new_instance(0, 0, object_class);
+
+
+#line 116 "parser.c"
+ {
+ cs = JSON_object_start;
+ }
+
+#line 166 "parser.rl"
+
+#line 123 "parser.c"
+ {
+ if ( p == pe )
+ goto _test_eof;
+ switch ( cs )
+ {
+case 1:
+ if ( (*p) == 123 )
+ goto st2;
+ goto st0;
+st0:
+cs = 0;
+ goto _out;
+st2:
+ if ( ++p == pe )
+ goto _test_eof2;
+case 2:
+ switch( (*p) ) {
+ case 13: goto st2;
+ case 32: goto st2;
+ case 34: goto tr2;
+ case 47: goto st23;
+ case 125: goto tr4;
+ }
+ if ( 9 <= (*p) && (*p) <= 10 )
+ goto st2;
+ goto st0;
+tr2:
+#line 133 "parser.rl"
+ {
+ char *np;
+ json->parsing_name = 1;
+ np = JSON_parse_string(json, p, pe, &last_name);
+ json->parsing_name = 0;
+ if (np == NULL) { p--; {p++; cs = 3; goto _out;} } else {p = (( np))-1;}
+ }
+ goto st3;
+st3:
+ if ( ++p == pe )
+ goto _test_eof3;
+case 3:
+#line 164 "parser.c"
+ switch( (*p) ) {
+ case 13: goto st3;
+ case 32: goto st3;
+ case 47: goto st4;
+ case 58: goto st8;
+ }
+ if ( 9 <= (*p) && (*p) <= 10 )
+ goto st3;
+ goto st0;
+st4:
+ if ( ++p == pe )
+ goto _test_eof4;
+case 4:
+ switch( (*p) ) {
+ case 42: goto st5;
+ case 47: goto st7;
+ }
+ goto st0;
+st5:
+ if ( ++p == pe )
+ goto _test_eof5;
+case 5:
+ if ( (*p) == 42 )
+ goto st6;
+ goto st5;
+st6:
+ if ( ++p == pe )
+ goto _test_eof6;
+case 6:
+ switch( (*p) ) {
+ case 42: goto st6;
+ case 47: goto st3;
+ }
+ goto st5;
+st7:
+ if ( ++p == pe )
+ goto _test_eof7;
+case 7:
+ if ( (*p) == 10 )
+ goto st3;
+ goto st7;
+st8:
+ if ( ++p == pe )
+ goto _test_eof8;
+case 8:
+ switch( (*p) ) {
+ case 13: goto st8;
+ case 32: goto st8;
+ case 34: goto tr11;
+ case 45: goto tr11;
+ case 47: goto st19;
+ case 73: goto tr11;
+ case 78: goto tr11;
+ case 91: goto tr11;
+ case 102: goto tr11;
+ case 110: goto tr11;
+ case 116: goto tr11;
+ case 123: goto tr11;
+ }
+ if ( (*p) > 10 ) {
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto tr11;
+ } else if ( (*p) >= 9 )
+ goto st8;
+ goto st0;
+tr11:
+#line 118 "parser.rl"
+ {
+ VALUE v = Qnil;
+ char *np = JSON_parse_value(json, p, pe, &v);
+ if (np == NULL) {
+ p--; {p++; cs = 9; goto _out;}
+ } else {
+ if (NIL_P(json->object_class)) {
+ rb_hash_aset(*result, last_name, v);
+ } else {
+ rb_funcall(*result, i_aset, 2, last_name, v);
+ }
+ {p = (( np))-1;}
+ }
+ }
+ goto st9;
+st9:
+ if ( ++p == pe )
+ goto _test_eof9;
+case 9:
+#line 251 "parser.c"
+ switch( (*p) ) {
+ case 13: goto st9;
+ case 32: goto st9;
+ case 44: goto st10;
+ case 47: goto st15;
+ case 125: goto tr4;
+ }
+ if ( 9 <= (*p) && (*p) <= 10 )
+ goto st9;
+ goto st0;
+st10:
+ if ( ++p == pe )
+ goto _test_eof10;
+case 10:
+ switch( (*p) ) {
+ case 13: goto st10;
+ case 32: goto st10;
+ case 34: goto tr2;
+ case 47: goto st11;
+ }
+ if ( 9 <= (*p) && (*p) <= 10 )
+ goto st10;
+ goto st0;
+st11:
+ if ( ++p == pe )
+ goto _test_eof11;
+case 11:
+ switch( (*p) ) {
+ case 42: goto st12;
+ case 47: goto st14;
+ }
+ goto st0;
+st12:
+ if ( ++p == pe )
+ goto _test_eof12;
+case 12:
+ if ( (*p) == 42 )
+ goto st13;
+ goto st12;
+st13:
+ if ( ++p == pe )
+ goto _test_eof13;
+case 13:
+ switch( (*p) ) {
+ case 42: goto st13;
+ case 47: goto st10;
+ }
+ goto st12;
+st14:
+ if ( ++p == pe )
+ goto _test_eof14;
+case 14:
+ if ( (*p) == 10 )
+ goto st10;
+ goto st14;
+st15:
+ if ( ++p == pe )
+ goto _test_eof15;
+case 15:
+ switch( (*p) ) {
+ case 42: goto st16;
+ case 47: goto st18;
+ }
+ goto st0;
+st16:
+ if ( ++p == pe )
+ goto _test_eof16;
+case 16:
+ if ( (*p) == 42 )
+ goto st17;
+ goto st16;
+st17:
+ if ( ++p == pe )
+ goto _test_eof17;
+case 17:
+ switch( (*p) ) {
+ case 42: goto st17;
+ case 47: goto st9;
+ }
+ goto st16;
+st18:
+ if ( ++p == pe )
+ goto _test_eof18;
+case 18:
+ if ( (*p) == 10 )
+ goto st9;
+ goto st18;
+tr4:
+#line 141 "parser.rl"
+ { p--; {p++; cs = 27; goto _out;} }
+ goto st27;
+st27:
+ if ( ++p == pe )
+ goto _test_eof27;
+case 27:
+#line 347 "parser.c"
+ goto st0;
+st19:
+ if ( ++p == pe )
+ goto _test_eof19;
+case 19:
+ switch( (*p) ) {
+ case 42: goto st20;
+ case 47: goto st22;
+ }
+ goto st0;
+st20:
+ if ( ++p == pe )
+ goto _test_eof20;
+case 20:
+ if ( (*p) == 42 )
+ goto st21;
+ goto st20;
+st21:
+ if ( ++p == pe )
+ goto _test_eof21;
+case 21:
+ switch( (*p) ) {
+ case 42: goto st21;
+ case 47: goto st8;
+ }
+ goto st20;
+st22:
+ if ( ++p == pe )
+ goto _test_eof22;
+case 22:
+ if ( (*p) == 10 )
+ goto st8;
+ goto st22;
+st23:
+ if ( ++p == pe )
+ goto _test_eof23;
+case 23:
+ switch( (*p) ) {
+ case 42: goto st24;
+ case 47: goto st26;
+ }
+ goto st0;
+st24:
+ if ( ++p == pe )
+ goto _test_eof24;
+case 24:
+ if ( (*p) == 42 )
+ goto st25;
+ goto st24;
+st25:
+ if ( ++p == pe )
+ goto _test_eof25;
+case 25:
+ switch( (*p) ) {
+ case 42: goto st25;
+ case 47: goto st2;
+ }
+ goto st24;
+st26:
+ if ( ++p == pe )
+ goto _test_eof26;
+case 26:
+ if ( (*p) == 10 )
+ goto st2;
+ goto st26;
+ }
+ _test_eof2: cs = 2; goto _test_eof;
+ _test_eof3: cs = 3; goto _test_eof;
+ _test_eof4: cs = 4; goto _test_eof;
+ _test_eof5: cs = 5; goto _test_eof;
+ _test_eof6: cs = 6; goto _test_eof;
+ _test_eof7: cs = 7; goto _test_eof;
+ _test_eof8: cs = 8; goto _test_eof;
+ _test_eof9: cs = 9; goto _test_eof;
+ _test_eof10: cs = 10; goto _test_eof;
+ _test_eof11: cs = 11; goto _test_eof;
+ _test_eof12: cs = 12; goto _test_eof;
+ _test_eof13: cs = 13; goto _test_eof;
+ _test_eof14: cs = 14; goto _test_eof;
+ _test_eof15: cs = 15; goto _test_eof;
+ _test_eof16: cs = 16; goto _test_eof;
+ _test_eof17: cs = 17; goto _test_eof;
+ _test_eof18: cs = 18; goto _test_eof;
+ _test_eof27: cs = 27; goto _test_eof;
+ _test_eof19: cs = 19; goto _test_eof;
+ _test_eof20: cs = 20; goto _test_eof;
+ _test_eof21: cs = 21; goto _test_eof;
+ _test_eof22: cs = 22; goto _test_eof;
+ _test_eof23: cs = 23; goto _test_eof;
+ _test_eof24: cs = 24; goto _test_eof;
+ _test_eof25: cs = 25; goto _test_eof;
+ _test_eof26: cs = 26; goto _test_eof;
+
+ _test_eof: {}
+ _out: {}
+ }
+
+#line 167 "parser.rl"
+
+ if (cs >= JSON_object_first_final) {
+ if (json->create_additions) {
+ VALUE klassname;
+ if (NIL_P(json->object_class)) {
+ klassname = rb_hash_aref(*result, json->create_id);
+ } else {
+ klassname = rb_funcall(*result, i_aref, 1, json->create_id);
+ }
+ if (!NIL_P(klassname)) {
+ VALUE klass = rb_funcall(mJSON, i_deep_const_get, 1, klassname);
+ if (RTEST(rb_funcall(klass, i_json_creatable_p, 0))) {
+ *result = rb_funcall(klass, i_json_create, 1, *result);
+ }
+ }
+ }
+ return p + 1;
+ } else {
+ return NULL;
+ }
+}
+
+
+
+#line 470 "parser.c"
+enum {JSON_value_start = 1};
+enum {JSON_value_first_final = 21};
+enum {JSON_value_error = 0};
+
+enum {JSON_value_en_main = 1};
+
+
+#line 271 "parser.rl"
+
+
+static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result)
+{
+ int cs = EVIL;
+
+
+#line 486 "parser.c"
+ {
+ cs = JSON_value_start;
+ }
+
+#line 278 "parser.rl"
+
+#line 493 "parser.c"
+ {
+ if ( p == pe )
+ goto _test_eof;
+ switch ( cs )
+ {
+case 1:
+ switch( (*p) ) {
+ case 34: goto tr0;
+ case 45: goto tr2;
+ case 73: goto st2;
+ case 78: goto st9;
+ case 91: goto tr5;
+ case 102: goto st11;
+ case 110: goto st15;
+ case 116: goto st18;
+ case 123: goto tr9;
+ }
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto tr2;
+ goto st0;
+st0:
+cs = 0;
+ goto _out;
+tr0:
+#line 219 "parser.rl"
+ {
+ char *np = JSON_parse_string(json, p, pe, result);
+ if (np == NULL) { p--; {p++; cs = 21; goto _out;} } else {p = (( np))-1;}
+ }
+ goto st21;
+tr2:
+#line 224 "parser.rl"
+ {
+ char *np;
+ if(pe > p + 9 - json->quirks_mode && !strncmp(MinusInfinity, p, 9)) {
+ if (json->allow_nan) {
+ *result = CMinusInfinity;
+ {p = (( p + 10))-1;}
+ p--; {p++; cs = 21; goto _out;}
+ } else {
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p);
+ }
+ }
+ np = JSON_parse_float(json, p, pe, result);
+ if (np != NULL) {p = (( np))-1;}
+ np = JSON_parse_integer(json, p, pe, result);
+ if (np != NULL) {p = (( np))-1;}
+ p--; {p++; cs = 21; goto _out;}
+ }
+ goto st21;
+tr5:
+#line 242 "parser.rl"
+ {
+ char *np;
+ json->current_nesting++;
+ np = JSON_parse_array(json, p, pe, result);
+ json->current_nesting--;
+ if (np == NULL) { p--; {p++; cs = 21; goto _out;} } else {p = (( np))-1;}
+ }
+ goto st21;
+tr9:
+#line 250 "parser.rl"
+ {
+ char *np;
+ json->current_nesting++;
+ np = JSON_parse_object(json, p, pe, result);
+ json->current_nesting--;
+ if (np == NULL) { p--; {p++; cs = 21; goto _out;} } else {p = (( np))-1;}
+ }
+ goto st21;
+tr16:
+#line 212 "parser.rl"
+ {
+ if (json->allow_nan) {
+ *result = CInfinity;
+ } else {
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p - 8);
+ }
+ }
+ goto st21;
+tr18:
+#line 205 "parser.rl"
+ {
+ if (json->allow_nan) {
+ *result = CNaN;
+ } else {
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p - 2);
+ }
+ }
+ goto st21;
+tr22:
+#line 199 "parser.rl"
+ {
+ *result = Qfalse;
+ }
+ goto st21;
+tr25:
+#line 196 "parser.rl"
+ {
+ *result = Qnil;
+ }
+ goto st21;
+tr28:
+#line 202 "parser.rl"
+ {
+ *result = Qtrue;
+ }
+ goto st21;
+st21:
+ if ( ++p == pe )
+ goto _test_eof21;
+case 21:
+#line 258 "parser.rl"
+ { p--; {p++; cs = 21; goto _out;} }
+#line 608 "parser.c"
+ goto st0;
+st2:
+ if ( ++p == pe )
+ goto _test_eof2;
+case 2:
+ if ( (*p) == 110 )
+ goto st3;
+ goto st0;
+st3:
+ if ( ++p == pe )
+ goto _test_eof3;
+case 3:
+ if ( (*p) == 102 )
+ goto st4;
+ goto st0;
+st4:
+ if ( ++p == pe )
+ goto _test_eof4;
+case 4:
+ if ( (*p) == 105 )
+ goto st5;
+ goto st0;
+st5:
+ if ( ++p == pe )
+ goto _test_eof5;
+case 5:
+ if ( (*p) == 110 )
+ goto st6;
+ goto st0;
+st6:
+ if ( ++p == pe )
+ goto _test_eof6;
+case 6:
+ if ( (*p) == 105 )
+ goto st7;
+ goto st0;
+st7:
+ if ( ++p == pe )
+ goto _test_eof7;
+case 7:
+ if ( (*p) == 116 )
+ goto st8;
+ goto st0;
+st8:
+ if ( ++p == pe )
+ goto _test_eof8;
+case 8:
+ if ( (*p) == 121 )
+ goto tr16;
+ goto st0;
+st9:
+ if ( ++p == pe )
+ goto _test_eof9;
+case 9:
+ if ( (*p) == 97 )
+ goto st10;
+ goto st0;
+st10:
+ if ( ++p == pe )
+ goto _test_eof10;
+case 10:
+ if ( (*p) == 78 )
+ goto tr18;
+ goto st0;
+st11:
+ if ( ++p == pe )
+ goto _test_eof11;
+case 11:
+ if ( (*p) == 97 )
+ goto st12;
+ goto st0;
+st12:
+ if ( ++p == pe )
+ goto _test_eof12;
+case 12:
+ if ( (*p) == 108 )
+ goto st13;
+ goto st0;
+st13:
+ if ( ++p == pe )
+ goto _test_eof13;
+case 13:
+ if ( (*p) == 115 )
+ goto st14;
+ goto st0;
+st14:
+ if ( ++p == pe )
+ goto _test_eof14;
+case 14:
+ if ( (*p) == 101 )
+ goto tr22;
+ goto st0;
+st15:
+ if ( ++p == pe )
+ goto _test_eof15;
+case 15:
+ if ( (*p) == 117 )
+ goto st16;
+ goto st0;
+st16:
+ if ( ++p == pe )
+ goto _test_eof16;
+case 16:
+ if ( (*p) == 108 )
+ goto st17;
+ goto st0;
+st17:
+ if ( ++p == pe )
+ goto _test_eof17;
+case 17:
+ if ( (*p) == 108 )
+ goto tr25;
+ goto st0;
+st18:
+ if ( ++p == pe )
+ goto _test_eof18;
+case 18:
+ if ( (*p) == 114 )
+ goto st19;
+ goto st0;
+st19:
+ if ( ++p == pe )
+ goto _test_eof19;
+case 19:
+ if ( (*p) == 117 )
+ goto st20;
+ goto st0;
+st20:
+ if ( ++p == pe )
+ goto _test_eof20;
+case 20:
+ if ( (*p) == 101 )
+ goto tr28;
+ goto st0;
+ }
+ _test_eof21: cs = 21; goto _test_eof;
+ _test_eof2: cs = 2; goto _test_eof;
+ _test_eof3: cs = 3; goto _test_eof;
+ _test_eof4: cs = 4; goto _test_eof;
+ _test_eof5: cs = 5; goto _test_eof;
+ _test_eof6: cs = 6; goto _test_eof;
+ _test_eof7: cs = 7; goto _test_eof;
+ _test_eof8: cs = 8; goto _test_eof;
+ _test_eof9: cs = 9; goto _test_eof;
+ _test_eof10: cs = 10; goto _test_eof;
+ _test_eof11: cs = 11; goto _test_eof;
+ _test_eof12: cs = 12; goto _test_eof;
+ _test_eof13: cs = 13; goto _test_eof;
+ _test_eof14: cs = 14; goto _test_eof;
+ _test_eof15: cs = 15; goto _test_eof;
+ _test_eof16: cs = 16; goto _test_eof;
+ _test_eof17: cs = 17; goto _test_eof;
+ _test_eof18: cs = 18; goto _test_eof;
+ _test_eof19: cs = 19; goto _test_eof;
+ _test_eof20: cs = 20; goto _test_eof;
+
+ _test_eof: {}
+ _out: {}
+ }
+
+#line 279 "parser.rl"
+
+ if (cs >= JSON_value_first_final) {
+ return p;
+ } else {
+ return NULL;
+ }
+}
+
+
+#line 779 "parser.c"
+enum {JSON_integer_start = 1};
+enum {JSON_integer_first_final = 3};
+enum {JSON_integer_error = 0};
+
+enum {JSON_integer_en_main = 1};
+
+
+#line 295 "parser.rl"
+
+
+static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result)
+{
+ int cs = EVIL;
+
+
+#line 795 "parser.c"
+ {
+ cs = JSON_integer_start;
+ }
+
+#line 302 "parser.rl"
+ json->memo = p;
+
+#line 803 "parser.c"
+ {
+ if ( p == pe )
+ goto _test_eof;
+ switch ( cs )
+ {
+case 1:
+ switch( (*p) ) {
+ case 45: goto st2;
+ case 48: goto st3;
+ }
+ if ( 49 <= (*p) && (*p) <= 57 )
+ goto st5;
+ goto st0;
+st0:
+cs = 0;
+ goto _out;
+st2:
+ if ( ++p == pe )
+ goto _test_eof2;
+case 2:
+ if ( (*p) == 48 )
+ goto st3;
+ if ( 49 <= (*p) && (*p) <= 57 )
+ goto st5;
+ goto st0;
+st3:
+ if ( ++p == pe )
+ goto _test_eof3;
+case 3:
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto st0;
+ goto tr4;
+tr4:
+#line 292 "parser.rl"
+ { p--; {p++; cs = 4; goto _out;} }
+ goto st4;
+st4:
+ if ( ++p == pe )
+ goto _test_eof4;
+case 4:
+#line 844 "parser.c"
+ goto st0;
+st5:
+ if ( ++p == pe )
+ goto _test_eof5;
+case 5:
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto st5;
+ goto tr4;
+ }
+ _test_eof2: cs = 2; goto _test_eof;
+ _test_eof3: cs = 3; goto _test_eof;
+ _test_eof4: cs = 4; goto _test_eof;
+ _test_eof5: cs = 5; goto _test_eof;
+
+ _test_eof: {}
+ _out: {}
+ }
+
+#line 304 "parser.rl"
+
+ if (cs >= JSON_integer_first_final) {
+ long len = p - json->memo;
+ fbuffer_clear(json->fbuffer);
+ fbuffer_append(json->fbuffer, json->memo, len);
+ fbuffer_append_char(json->fbuffer, '\0');
+ *result = rb_cstr2inum(FBUFFER_PTR(json->fbuffer), 10);
+ return p + 1;
+ } else {
+ return NULL;
+ }
+}
+
+
+#line 878 "parser.c"
+enum {JSON_float_start = 1};
+enum {JSON_float_first_final = 8};
+enum {JSON_float_error = 0};
+
+enum {JSON_float_en_main = 1};
+
+
+#line 329 "parser.rl"
+
+
+static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result)
+{
+ int cs = EVIL;
+
+
+#line 894 "parser.c"
+ {
+ cs = JSON_float_start;
+ }
+
+#line 336 "parser.rl"
+ json->memo = p;
+
+#line 902 "parser.c"
+ {
+ if ( p == pe )
+ goto _test_eof;
+ switch ( cs )
+ {
+case 1:
+ switch( (*p) ) {
+ case 45: goto st2;
+ case 48: goto st3;
+ }
+ if ( 49 <= (*p) && (*p) <= 57 )
+ goto st7;
+ goto st0;
+st0:
+cs = 0;
+ goto _out;
+st2:
+ if ( ++p == pe )
+ goto _test_eof2;
+case 2:
+ if ( (*p) == 48 )
+ goto st3;
+ if ( 49 <= (*p) && (*p) <= 57 )
+ goto st7;
+ goto st0;
+st3:
+ if ( ++p == pe )
+ goto _test_eof3;
+case 3:
+ switch( (*p) ) {
+ case 46: goto st4;
+ case 69: goto st5;
+ case 101: goto st5;
+ }
+ goto st0;
+st4:
+ if ( ++p == pe )
+ goto _test_eof4;
+case 4:
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto st8;
+ goto st0;
+st8:
+ if ( ++p == pe )
+ goto _test_eof8;
+case 8:
+ switch( (*p) ) {
+ case 69: goto st5;
+ case 101: goto st5;
+ }
+ if ( (*p) > 46 ) {
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto st8;
+ } else if ( (*p) >= 45 )
+ goto st0;
+ goto tr9;
+tr9:
+#line 323 "parser.rl"
+ { p--; {p++; cs = 9; goto _out;} }
+ goto st9;
+st9:
+ if ( ++p == pe )
+ goto _test_eof9;
+case 9:
+#line 967 "parser.c"
+ goto st0;
+st5:
+ if ( ++p == pe )
+ goto _test_eof5;
+case 5:
+ switch( (*p) ) {
+ case 43: goto st6;
+ case 45: goto st6;
+ }
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto st10;
+ goto st0;
+st6:
+ if ( ++p == pe )
+ goto _test_eof6;
+case 6:
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto st10;
+ goto st0;
+st10:
+ if ( ++p == pe )
+ goto _test_eof10;
+case 10:
+ switch( (*p) ) {
+ case 69: goto st0;
+ case 101: goto st0;
+ }
+ if ( (*p) > 46 ) {
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto st10;
+ } else if ( (*p) >= 45 )
+ goto st0;
+ goto tr9;
+st7:
+ if ( ++p == pe )
+ goto _test_eof7;
+case 7:
+ switch( (*p) ) {
+ case 46: goto st4;
+ case 69: goto st5;
+ case 101: goto st5;
+ }
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto st7;
+ goto st0;
+ }
+ _test_eof2: cs = 2; goto _test_eof;
+ _test_eof3: cs = 3; goto _test_eof;
+ _test_eof4: cs = 4; goto _test_eof;
+ _test_eof8: cs = 8; goto _test_eof;
+ _test_eof9: cs = 9; goto _test_eof;
+ _test_eof5: cs = 5; goto _test_eof;
+ _test_eof6: cs = 6; goto _test_eof;
+ _test_eof10: cs = 10; goto _test_eof;
+ _test_eof7: cs = 7; goto _test_eof;
+
+ _test_eof: {}
+ _out: {}
+ }
+
+#line 338 "parser.rl"
+
+ if (cs >= JSON_float_first_final) {
+ long len = p - json->memo;
+ fbuffer_clear(json->fbuffer);
+ fbuffer_append(json->fbuffer, json->memo, len);
+ fbuffer_append_char(json->fbuffer, '\0');
+ *result = rb_float_new(rb_cstr_to_dbl(FBUFFER_PTR(json->fbuffer), 1));
+ return p + 1;
+ } else {
+ return NULL;
+ }
+}
+
+
+
+#line 1044 "parser.c"
+enum {JSON_array_start = 1};
+enum {JSON_array_first_final = 17};
+enum {JSON_array_error = 0};
+
+enum {JSON_array_en_main = 1};
+
+
+#line 381 "parser.rl"
+
+
+static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result)
+{
+ int cs = EVIL;
+ VALUE array_class = json->array_class;
+
+ if (json->max_nesting && json->current_nesting > json->max_nesting) {
+ rb_raise(eNestingError, "nesting of %d is too deep", json->current_nesting);
+ }
+ *result = NIL_P(array_class) ? rb_ary_new() : rb_class_new_instance(0, 0, array_class);
+
+
+#line 1066 "parser.c"
+ {
+ cs = JSON_array_start;
+ }
+
+#line 394 "parser.rl"
+
+#line 1073 "parser.c"
+ {
+ if ( p == pe )
+ goto _test_eof;
+ switch ( cs )
+ {
+case 1:
+ if ( (*p) == 91 )
+ goto st2;
+ goto st0;
+st0:
+cs = 0;
+ goto _out;
+st2:
+ if ( ++p == pe )
+ goto _test_eof2;
+case 2:
+ switch( (*p) ) {
+ case 13: goto st2;
+ case 32: goto st2;
+ case 34: goto tr2;
+ case 45: goto tr2;
+ case 47: goto st13;
+ case 73: goto tr2;
+ case 78: goto tr2;
+ case 91: goto tr2;
+ case 93: goto tr4;
+ case 102: goto tr2;
+ case 110: goto tr2;
+ case 116: goto tr2;
+ case 123: goto tr2;
+ }
+ if ( (*p) > 10 ) {
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto tr2;
+ } else if ( (*p) >= 9 )
+ goto st2;
+ goto st0;
+tr2:
+#line 358 "parser.rl"
+ {
+ VALUE v = Qnil;
+ char *np = JSON_parse_value(json, p, pe, &v);
+ if (np == NULL) {
+ p--; {p++; cs = 3; goto _out;}
+ } else {
+ if (NIL_P(json->array_class)) {
+ rb_ary_push(*result, v);
+ } else {
+ rb_funcall(*result, i_leftshift, 1, v);
+ }
+ {p = (( np))-1;}
+ }
+ }
+ goto st3;
+st3:
+ if ( ++p == pe )
+ goto _test_eof3;
+case 3:
+#line 1132 "parser.c"
+ switch( (*p) ) {
+ case 13: goto st3;
+ case 32: goto st3;
+ case 44: goto st4;
+ case 47: goto st9;
+ case 93: goto tr4;
+ }
+ if ( 9 <= (*p) && (*p) <= 10 )
+ goto st3;
+ goto st0;
+st4:
+ if ( ++p == pe )
+ goto _test_eof4;
+case 4:
+ switch( (*p) ) {
+ case 13: goto st4;
+ case 32: goto st4;
+ case 34: goto tr2;
+ case 45: goto tr2;
+ case 47: goto st5;
+ case 73: goto tr2;
+ case 78: goto tr2;
+ case 91: goto tr2;
+ case 102: goto tr2;
+ case 110: goto tr2;
+ case 116: goto tr2;
+ case 123: goto tr2;
+ }
+ if ( (*p) > 10 ) {
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto tr2;
+ } else if ( (*p) >= 9 )
+ goto st4;
+ goto st0;
+st5:
+ if ( ++p == pe )
+ goto _test_eof5;
+case 5:
+ switch( (*p) ) {
+ case 42: goto st6;
+ case 47: goto st8;
+ }
+ goto st0;
+st6:
+ if ( ++p == pe )
+ goto _test_eof6;
+case 6:
+ if ( (*p) == 42 )
+ goto st7;
+ goto st6;
+st7:
+ if ( ++p == pe )
+ goto _test_eof7;
+case 7:
+ switch( (*p) ) {
+ case 42: goto st7;
+ case 47: goto st4;
+ }
+ goto st6;
+st8:
+ if ( ++p == pe )
+ goto _test_eof8;
+case 8:
+ if ( (*p) == 10 )
+ goto st4;
+ goto st8;
+st9:
+ if ( ++p == pe )
+ goto _test_eof9;
+case 9:
+ switch( (*p) ) {
+ case 42: goto st10;
+ case 47: goto st12;
+ }
+ goto st0;
+st10:
+ if ( ++p == pe )
+ goto _test_eof10;
+case 10:
+ if ( (*p) == 42 )
+ goto st11;
+ goto st10;
+st11:
+ if ( ++p == pe )
+ goto _test_eof11;
+case 11:
+ switch( (*p) ) {
+ case 42: goto st11;
+ case 47: goto st3;
+ }
+ goto st10;
+st12:
+ if ( ++p == pe )
+ goto _test_eof12;
+case 12:
+ if ( (*p) == 10 )
+ goto st3;
+ goto st12;
+tr4:
+#line 373 "parser.rl"
+ { p--; {p++; cs = 17; goto _out;} }
+ goto st17;
+st17:
+ if ( ++p == pe )
+ goto _test_eof17;
+case 17:
+#line 1239 "parser.c"
+ goto st0;
+st13:
+ if ( ++p == pe )
+ goto _test_eof13;
+case 13:
+ switch( (*p) ) {
+ case 42: goto st14;
+ case 47: goto st16;
+ }
+ goto st0;
+st14:
+ if ( ++p == pe )
+ goto _test_eof14;
+case 14:
+ if ( (*p) == 42 )
+ goto st15;
+ goto st14;
+st15:
+ if ( ++p == pe )
+ goto _test_eof15;
+case 15:
+ switch( (*p) ) {
+ case 42: goto st15;
+ case 47: goto st2;
+ }
+ goto st14;
+st16:
+ if ( ++p == pe )
+ goto _test_eof16;
+case 16:
+ if ( (*p) == 10 )
+ goto st2;
+ goto st16;
+ }
+ _test_eof2: cs = 2; goto _test_eof;
+ _test_eof3: cs = 3; goto _test_eof;
+ _test_eof4: cs = 4; goto _test_eof;
+ _test_eof5: cs = 5; goto _test_eof;
+ _test_eof6: cs = 6; goto _test_eof;
+ _test_eof7: cs = 7; goto _test_eof;
+ _test_eof8: cs = 8; goto _test_eof;
+ _test_eof9: cs = 9; goto _test_eof;
+ _test_eof10: cs = 10; goto _test_eof;
+ _test_eof11: cs = 11; goto _test_eof;
+ _test_eof12: cs = 12; goto _test_eof;
+ _test_eof17: cs = 17; goto _test_eof;
+ _test_eof13: cs = 13; goto _test_eof;
+ _test_eof14: cs = 14; goto _test_eof;
+ _test_eof15: cs = 15; goto _test_eof;
+ _test_eof16: cs = 16; goto _test_eof;
+
+ _test_eof: {}
+ _out: {}
+ }
+
+#line 395 "parser.rl"
+
+ if(cs >= JSON_array_first_final) {
+ return p + 1;
+ } else {
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p);
+ return NULL;
+ }
+}
+
+static VALUE json_string_unescape(VALUE result, char *string, char *stringEnd)
+{
+ char *p = string, *pe = string, *unescape;
+ int unescape_len;
+ char buf[4];
+
+ while (pe < stringEnd) {
+ if (*pe == '\\') {
+ unescape = (char *) "?";
+ unescape_len = 1;
+ if (pe > p) rb_str_buf_cat(result, p, pe - p);
+ switch (*++pe) {
+ case 'n':
+ unescape = (char *) "\n";
+ break;
+ case 'r':
+ unescape = (char *) "\r";
+ break;
+ case 't':
+ unescape = (char *) "\t";
+ break;
+ case '"':
+ unescape = (char *) "\"";
+ break;
+ case '\\':
+ unescape = (char *) "\\";
+ break;
+ case 'b':
+ unescape = (char *) "\b";
+ break;
+ case 'f':
+ unescape = (char *) "\f";
+ break;
+ case 'u':
+ if (pe > stringEnd - 4) {
+ return Qnil;
+ } else {
+ UTF32 ch = unescape_unicode((unsigned char *) ++pe);
+ pe += 3;
+ if (UNI_SUR_HIGH_START == (ch & 0xFC00)) {
+ pe++;
+ if (pe > stringEnd - 6) return Qnil;
+ if (pe[0] == '\\' && pe[1] == 'u') {
+ UTF32 sur = unescape_unicode((unsigned char *) pe + 2);
+ ch = (((ch & 0x3F) << 10) | ((((ch >> 6) & 0xF) + 1) << 16)
+ | (sur & 0x3FF));
+ pe += 5;
+ } else {
+ unescape = (char *) "?";
+ break;
+ }
+ }
+ unescape_len = convert_UTF32_to_UTF8(buf, ch);
+ unescape = buf;
+ }
+ break;
+ default:
+ p = pe;
+ continue;
+ }
+ rb_str_buf_cat(result, unescape, unescape_len);
+ p = ++pe;
+ } else {
+ pe++;
+ }
+ }
+ rb_str_buf_cat(result, p, pe - p);
+ return result;
+}
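The \u branch above reads a high surrogate and, if a \u-escaped low surrogate follows, combines the pair with ch = (((ch & 0x3F) << 10) | ((((ch >> 6) & 0xF) + 1) << 16) | (sur & 0x3FF)). That is the textbook 0x10000 + ((hi - 0xD800) << 10) + (lo - 0xDC00) with the 0x10000 folded into the "+ 1" applied to the top four payload bits of the high surrogate. A small standalone check of the equivalence for one code point (the function names are illustrative only):

#include <assert.h>

typedef unsigned long UTF32;   /* matches the typedef in parser.h */

/* Textbook combination of a UTF-16 surrogate pair. */
static UTF32 combine_textbook(UTF32 hi, UTF32 lo)
{
    return 0x10000 + ((hi - 0xD800) << 10) + (lo - 0xDC00);
}

/* The bit-twiddled form used in json_string_unescape above. */
static UTF32 combine_parser(UTF32 hi, UTF32 lo)
{
    return ((hi & 0x3F) << 10) | ((((hi >> 6) & 0xF) + 1) << 16) | (lo & 0x3FF);
}

int main(void)
{
    /* The JSON escape pair "\uD83D\uDE00" denotes U+1F600. */
    assert(combine_textbook(0xD83D, 0xDE00) == 0x1F600);
    assert(combine_parser(0xD83D, 0xDE00) == 0x1F600);
    return 0;
}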
+
+
+#line 1376 "parser.c"
+enum {JSON_string_start = 1};
+enum {JSON_string_first_final = 8};
+enum {JSON_string_error = 0};
+
+enum {JSON_string_en_main = 1};
+
+
+#line 494 "parser.rl"
+
+
+static int
+match_i(VALUE regexp, VALUE klass, VALUE memo)
+{
+ if (regexp == Qundef) return ST_STOP;
+ if (RTEST(rb_funcall(klass, i_json_creatable_p, 0)) &&
+ RTEST(rb_funcall(regexp, i_match, 1, rb_ary_entry(memo, 0)))) {
+ rb_ary_push(memo, klass);
+ return ST_STOP;
+ }
+ return ST_CONTINUE;
+}
+
+static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *result)
+{
+ int cs = EVIL;
+ VALUE match_string;
+
+ *result = rb_str_buf_new(0);
+
+#line 1406 "parser.c"
+ {
+ cs = JSON_string_start;
+ }
+
+#line 515 "parser.rl"
+ json->memo = p;
+
+#line 1414 "parser.c"
+ {
+ if ( p == pe )
+ goto _test_eof;
+ switch ( cs )
+ {
+case 1:
+ if ( (*p) == 34 )
+ goto st2;
+ goto st0;
+st0:
+cs = 0;
+ goto _out;
+st2:
+ if ( ++p == pe )
+ goto _test_eof2;
+case 2:
+ switch( (*p) ) {
+ case 34: goto tr2;
+ case 92: goto st3;
+ }
+ if ( 0 <= (*p) && (*p) <= 31 )
+ goto st0;
+ goto st2;
+tr2:
+#line 480 "parser.rl"
+ {
+ *result = json_string_unescape(*result, json->memo + 1, p);
+ if (NIL_P(*result)) {
+ p--;
+ {p++; cs = 8; goto _out;}
+ } else {
+ FORCE_UTF8(*result);
+ {p = (( p + 1))-1;}
+ }
+ }
+#line 491 "parser.rl"
+ { p--; {p++; cs = 8; goto _out;} }
+ goto st8;
+st8:
+ if ( ++p == pe )
+ goto _test_eof8;
+case 8:
+#line 1457 "parser.c"
+ goto st0;
+st3:
+ if ( ++p == pe )
+ goto _test_eof3;
+case 3:
+ if ( (*p) == 117 )
+ goto st4;
+ if ( 0 <= (*p) && (*p) <= 31 )
+ goto st0;
+ goto st2;
+st4:
+ if ( ++p == pe )
+ goto _test_eof4;
+case 4:
+ if ( (*p) < 65 ) {
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto st5;
+ } else if ( (*p) > 70 ) {
+ if ( 97 <= (*p) && (*p) <= 102 )
+ goto st5;
+ } else
+ goto st5;
+ goto st0;
+st5:
+ if ( ++p == pe )
+ goto _test_eof5;
+case 5:
+ if ( (*p) < 65 ) {
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto st6;
+ } else if ( (*p) > 70 ) {
+ if ( 97 <= (*p) && (*p) <= 102 )
+ goto st6;
+ } else
+ goto st6;
+ goto st0;
+st6:
+ if ( ++p == pe )
+ goto _test_eof6;
+case 6:
+ if ( (*p) < 65 ) {
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto st7;
+ } else if ( (*p) > 70 ) {
+ if ( 97 <= (*p) && (*p) <= 102 )
+ goto st7;
+ } else
+ goto st7;
+ goto st0;
+st7:
+ if ( ++p == pe )
+ goto _test_eof7;
+case 7:
+ if ( (*p) < 65 ) {
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto st2;
+ } else if ( (*p) > 70 ) {
+ if ( 97 <= (*p) && (*p) <= 102 )
+ goto st2;
+ } else
+ goto st2;
+ goto st0;
+ }
+ _test_eof2: cs = 2; goto _test_eof;
+ _test_eof8: cs = 8; goto _test_eof;
+ _test_eof3: cs = 3; goto _test_eof;
+ _test_eof4: cs = 4; goto _test_eof;
+ _test_eof5: cs = 5; goto _test_eof;
+ _test_eof6: cs = 6; goto _test_eof;
+ _test_eof7: cs = 7; goto _test_eof;
+
+ _test_eof: {}
+ _out: {}
+ }
+
+#line 517 "parser.rl"
+
+ if (json->create_additions && RTEST(match_string = json->match_string)) {
+ VALUE klass;
+ VALUE memo = rb_ary_new2(2);
+ rb_ary_push(memo, *result);
+ rb_hash_foreach(match_string, match_i, memo);
+ klass = rb_ary_entry(memo, 1);
+ if (RTEST(klass)) {
+ *result = rb_funcall(klass, i_json_create, 1, *result);
+ }
+ }
+
+ if (json->symbolize_names && json->parsing_name) {
+ *result = rb_str_intern(*result);
+ }
+ if (cs >= JSON_string_first_final) {
+ return p + 1;
+ } else {
+ return NULL;
+ }
+}
+
+/*
+ * Document-class: JSON::Ext::Parser
+ *
+ * This is the JSON parser implemented as a C extension. It can be selected
+ * for use by setting
+ *
+ * JSON.parser = JSON::Ext::Parser
+ *
+ * via JSON's parser= method.
+ *
+ */
+
+static VALUE convert_encoding(VALUE source)
+{
+ char *ptr = RSTRING_PTR(source);
+ long len = RSTRING_LEN(source);
+ if (len < 2) {
+ rb_raise(eParserError, "A JSON text must at least contain two octets!");
+ }
+#ifdef HAVE_RUBY_ENCODING_H
+ {
+ VALUE encoding = rb_funcall(source, i_encoding, 0);
+ if (encoding == CEncoding_ASCII_8BIT) {
+ if (len >= 4 && ptr[0] == 0 && ptr[1] == 0 && ptr[2] == 0) {
+ source = rb_funcall(source, i_encode, 2, CEncoding_UTF_8, CEncoding_UTF_32BE);
+ } else if (len >= 4 && ptr[0] == 0 && ptr[2] == 0) {
+ source = rb_funcall(source, i_encode, 2, CEncoding_UTF_8, CEncoding_UTF_16BE);
+ } else if (len >= 4 && ptr[1] == 0 && ptr[2] == 0 && ptr[3] == 0) {
+ source = rb_funcall(source, i_encode, 2, CEncoding_UTF_8, CEncoding_UTF_32LE);
+ } else if (len >= 4 && ptr[1] == 0 && ptr[3] == 0) {
+ source = rb_funcall(source, i_encode, 2, CEncoding_UTF_8, CEncoding_UTF_16LE);
+ } else {
+ source = rb_str_dup(source);
+ FORCE_UTF8(source);
+ }
+ } else {
+ source = rb_funcall(source, i_encode, 1, CEncoding_UTF_8);
+ }
+ }
+#else
+ if (len >= 4 && ptr[0] == 0 && ptr[1] == 0 && ptr[2] == 0) {
+ source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-32be"), source);
+ } else if (len >= 4 && ptr[0] == 0 && ptr[2] == 0) {
+ source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-16be"), source);
+ } else if (len >= 4 && ptr[1] == 0 && ptr[2] == 0 && ptr[3] == 0) {
+ source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-32le"), source);
+ } else if (len >= 4 && ptr[1] == 0 && ptr[3] == 0) {
+ source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-16le"), source);
+ }
+#endif
+ return source;
+}
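With no BOM to inspect (a strict-mode JSON text begins with '[' or '{'), convert_encoding infers the source encoding from where the zero bytes fall among the first four octets of an ASCII-only opener, which is why each non-UTF-8 branch also requires len >= 4. A minimal illustration using the two-character text "[]" (the array names below are illustrative only):

#include <assert.h>

int main(void)
{
    /* The JSON text "[]" as raw bytes in each encoding convert_encoding
     * distinguishes; the zero-byte positions are what the heuristic keys on. */
    const unsigned char utf32be[] = {0x00, 0x00, 0x00, '[', 0x00, 0x00, 0x00, ']'};
    const unsigned char utf16be[] = {0x00, '[', 0x00, ']'};
    const unsigned char utf32le[] = {'[', 0x00, 0x00, 0x00, ']', 0x00, 0x00, 0x00};
    const unsigned char utf16le[] = {'[', 0x00, ']', 0x00};

    assert(utf32be[0] == 0 && utf32be[1] == 0 && utf32be[2] == 0);  /* ptr[0..2] zero -> UTF-32BE */
    assert(utf16be[0] == 0 && utf16be[1] != 0 && utf16be[2] == 0);  /* ptr[0], ptr[2] -> UTF-16BE */
    assert(utf32le[1] == 0 && utf32le[2] == 0 && utf32le[3] == 0);  /* ptr[1..3] zero -> UTF-32LE */
    assert(utf16le[1] == 0 && utf16le[2] != 0 && utf16le[3] == 0);  /* ptr[1], ptr[3] -> UTF-16LE */
    return 0;
}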
+
+/*
+ * call-seq: new(source, opts => {})
+ *
+ * Creates a new JSON::Ext::Parser instance for the string _source_.
+ *
+ * It will be configured by the _opts_ hash. _opts_ can have the following
+ * keys:
+ * * *max_nesting*: The maximum depth of nesting allowed in the parsed data
+ * structures. Disable depth checking with :max_nesting => false|nil|0; it
+ * defaults to 100.
+ * * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in
+ * defiance of RFC 4627 to be parsed by the Parser. This option defaults to
+ * false.
+ * * *symbolize_names*: If set to true, returns symbols for the names
+ * (keys) in a JSON object. Otherwise strings are returned, which is also
+ * the default.
+ * * *create_additions*: If set to false, the Parser doesn't create
+ * additions even if a matching class and create_id was found. This option
+ * defaults to false.
+ * * *object_class*: Defaults to Hash
+ * * *array_class*: Defaults to Array
+ */
+static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self)
+{
+ VALUE source, opts;
+ GET_PARSER_INIT;
+
+ if (json->Vsource) {
+ rb_raise(rb_eTypeError, "already initialized instance");
+ }
+ rb_scan_args(argc, argv, "11", &source, &opts);
+ if (!NIL_P(opts)) {
+ opts = rb_convert_type(opts, T_HASH, "Hash", "to_hash");
+ if (NIL_P(opts)) {
+ rb_raise(rb_eArgError, "opts needs to be like a hash");
+ } else {
+ VALUE tmp = ID2SYM(i_max_nesting);
+ if (option_given_p(opts, tmp)) {
+ VALUE max_nesting = rb_hash_aref(opts, tmp);
+ if (RTEST(max_nesting)) {
+ Check_Type(max_nesting, T_FIXNUM);
+ json->max_nesting = FIX2INT(max_nesting);
+ } else {
+ json->max_nesting = 0;
+ }
+ } else {
+ json->max_nesting = 100;
+ }
+ tmp = ID2SYM(i_allow_nan);
+ if (option_given_p(opts, tmp)) {
+ json->allow_nan = RTEST(rb_hash_aref(opts, tmp)) ? 1 : 0;
+ } else {
+ json->allow_nan = 0;
+ }
+ tmp = ID2SYM(i_symbolize_names);
+ if (option_given_p(opts, tmp)) {
+ json->symbolize_names = RTEST(rb_hash_aref(opts, tmp)) ? 1 : 0;
+ } else {
+ json->symbolize_names = 0;
+ }
+ tmp = ID2SYM(i_quirks_mode);
+ if (option_given_p(opts, tmp)) {
+ VALUE quirks_mode = rb_hash_aref(opts, tmp);
+ json->quirks_mode = RTEST(quirks_mode) ? 1 : 0;
+ } else {
+ json->quirks_mode = 0;
+ }
+ tmp = ID2SYM(i_create_additions);
+ if (option_given_p(opts, tmp)) {
+ json->create_additions = RTEST(rb_hash_aref(opts, tmp));
+ } else {
+ json->create_additions = 0;
+ }
+ tmp = ID2SYM(i_create_id);
+ if (option_given_p(opts, tmp)) {
+ json->create_id = rb_hash_aref(opts, tmp);
+ } else {
+ json->create_id = rb_funcall(mJSON, i_create_id, 0);
+ }
+ tmp = ID2SYM(i_object_class);
+ if (option_given_p(opts, tmp)) {
+ json->object_class = rb_hash_aref(opts, tmp);
+ } else {
+ json->object_class = Qnil;
+ }
+ tmp = ID2SYM(i_array_class);
+ if (option_given_p(opts, tmp)) {
+ json->array_class = rb_hash_aref(opts, tmp);
+ } else {
+ json->array_class = Qnil;
+ }
+ tmp = ID2SYM(i_match_string);
+ if (option_given_p(opts, tmp)) {
+ VALUE match_string = rb_hash_aref(opts, tmp);
+ json->match_string = RTEST(match_string) ? match_string : Qnil;
+ } else {
+ json->match_string = Qnil;
+ }
+ }
+ } else {
+ json->max_nesting = 100;
+ json->allow_nan = 0;
+ json->create_additions = 1;
+ json->create_id = rb_funcall(mJSON, i_create_id, 0);
+ json->object_class = Qnil;
+ json->array_class = Qnil;
+ }
+ source = rb_convert_type(source, T_STRING, "String", "to_str");
+ if (!json->quirks_mode) {
+ source = convert_encoding(StringValue(source));
+ }
+ json->current_nesting = 0;
+ StringValue(source);
+ json->len = RSTRING_LEN(source);
+ json->source = RSTRING_PTR(source);
+ json->Vsource = source;
+ return self;
+}
+
+
+#line 1733 "parser.c"
+enum {JSON_start = 1};
+enum {JSON_first_final = 10};
+enum {JSON_error = 0};
+
+enum {JSON_en_main = 1};
+
+
+#line 740 "parser.rl"
+
+
+static VALUE cParser_parse_strict(VALUE self)
+{
+ char *p, *pe;
+ int cs = EVIL;
+ VALUE result = Qnil;
+ GET_PARSER;
+
+
+#line 1752 "parser.c"
+ {
+ cs = JSON_start;
+ }
+
+#line 750 "parser.rl"
+ p = json->source;
+ pe = p + json->len;
+
+#line 1761 "parser.c"
+ {
+ if ( p == pe )
+ goto _test_eof;
+ switch ( cs )
+ {
+st1:
+ if ( ++p == pe )
+ goto _test_eof1;
+case 1:
+ switch( (*p) ) {
+ case 13: goto st1;
+ case 32: goto st1;
+ case 47: goto st2;
+ case 91: goto tr3;
+ case 123: goto tr4;
+ }
+ if ( 9 <= (*p) && (*p) <= 10 )
+ goto st1;
+ goto st0;
+st0:
+cs = 0;
+ goto _out;
+st2:
+ if ( ++p == pe )
+ goto _test_eof2;
+case 2:
+ switch( (*p) ) {
+ case 42: goto st3;
+ case 47: goto st5;
+ }
+ goto st0;
+st3:
+ if ( ++p == pe )
+ goto _test_eof3;
+case 3:
+ if ( (*p) == 42 )
+ goto st4;
+ goto st3;
+st4:
+ if ( ++p == pe )
+ goto _test_eof4;
+case 4:
+ switch( (*p) ) {
+ case 42: goto st4;
+ case 47: goto st1;
+ }
+ goto st3;
+st5:
+ if ( ++p == pe )
+ goto _test_eof5;
+case 5:
+ if ( (*p) == 10 )
+ goto st1;
+ goto st5;
+tr3:
+#line 729 "parser.rl"
+ {
+ char *np;
+ json->current_nesting = 1;
+ np = JSON_parse_array(json, p, pe, &result);
+ if (np == NULL) { p--; {p++; cs = 10; goto _out;} } else {p = (( np))-1;}
+ }
+ goto st10;
+tr4:
+#line 722 "parser.rl"
+ {
+ char *np;
+ json->current_nesting = 1;
+ np = JSON_parse_object(json, p, pe, &result);
+ if (np == NULL) { p--; {p++; cs = 10; goto _out;} } else {p = (( np))-1;}
+ }
+ goto st10;
+st10:
+ if ( ++p == pe )
+ goto _test_eof10;
+case 10:
+#line 1838 "parser.c"
+ switch( (*p) ) {
+ case 13: goto st10;
+ case 32: goto st10;
+ case 47: goto st6;
+ }
+ if ( 9 <= (*p) && (*p) <= 10 )
+ goto st10;
+ goto st0;
+st6:
+ if ( ++p == pe )
+ goto _test_eof6;
+case 6:
+ switch( (*p) ) {
+ case 42: goto st7;
+ case 47: goto st9;
+ }
+ goto st0;
+st7:
+ if ( ++p == pe )
+ goto _test_eof7;
+case 7:
+ if ( (*p) == 42 )
+ goto st8;
+ goto st7;
+st8:
+ if ( ++p == pe )
+ goto _test_eof8;
+case 8:
+ switch( (*p) ) {
+ case 42: goto st8;
+ case 47: goto st10;
+ }
+ goto st7;
+st9:
+ if ( ++p == pe )
+ goto _test_eof9;
+case 9:
+ if ( (*p) == 10 )
+ goto st10;
+ goto st9;
+ }
+ _test_eof1: cs = 1; goto _test_eof;
+ _test_eof2: cs = 2; goto _test_eof;
+ _test_eof3: cs = 3; goto _test_eof;
+ _test_eof4: cs = 4; goto _test_eof;
+ _test_eof5: cs = 5; goto _test_eof;
+ _test_eof10: cs = 10; goto _test_eof;
+ _test_eof6: cs = 6; goto _test_eof;
+ _test_eof7: cs = 7; goto _test_eof;
+ _test_eof8: cs = 8; goto _test_eof;
+ _test_eof9: cs = 9; goto _test_eof;
+
+ _test_eof: {}
+ _out: {}
+ }
+
+#line 753 "parser.rl"
+
+ if (cs >= JSON_first_final && p == pe) {
+ return result;
+ } else {
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p);
+ return Qnil;
+ }
+}
+
+
+
+#line 1907 "parser.c"
+enum {JSON_quirks_mode_start = 1};
+enum {JSON_quirks_mode_first_final = 10};
+enum {JSON_quirks_mode_error = 0};
+
+enum {JSON_quirks_mode_en_main = 1};
+
+
+#line 778 "parser.rl"
+
+
+static VALUE cParser_parse_quirks_mode(VALUE self)
+{
+ char *p, *pe;
+ int cs = EVIL;
+ VALUE result = Qnil;
+ GET_PARSER;
+
+
+#line 1926 "parser.c"
+ {
+ cs = JSON_quirks_mode_start;
+ }
+
+#line 788 "parser.rl"
+ p = json->source;
+ pe = p + json->len;
+
+#line 1935 "parser.c"
+ {
+ if ( p == pe )
+ goto _test_eof;
+ switch ( cs )
+ {
+st1:
+ if ( ++p == pe )
+ goto _test_eof1;
+case 1:
+ switch( (*p) ) {
+ case 13: goto st1;
+ case 32: goto st1;
+ case 34: goto tr2;
+ case 45: goto tr2;
+ case 47: goto st6;
+ case 73: goto tr2;
+ case 78: goto tr2;
+ case 91: goto tr2;
+ case 102: goto tr2;
+ case 110: goto tr2;
+ case 116: goto tr2;
+ case 123: goto tr2;
+ }
+ if ( (*p) > 10 ) {
+ if ( 48 <= (*p) && (*p) <= 57 )
+ goto tr2;
+ } else if ( (*p) >= 9 )
+ goto st1;
+ goto st0;
+st0:
+cs = 0;
+ goto _out;
+tr2:
+#line 770 "parser.rl"
+ {
+ char *np = JSON_parse_value(json, p, pe, &result);
+ if (np == NULL) { p--; {p++; cs = 10; goto _out;} } else {p = (( np))-1;}
+ }
+ goto st10;
+st10:
+ if ( ++p == pe )
+ goto _test_eof10;
+case 10:
+#line 1979 "parser.c"
+ switch( (*p) ) {
+ case 13: goto st10;
+ case 32: goto st10;
+ case 47: goto st2;
+ }
+ if ( 9 <= (*p) && (*p) <= 10 )
+ goto st10;
+ goto st0;
+st2:
+ if ( ++p == pe )
+ goto _test_eof2;
+case 2:
+ switch( (*p) ) {
+ case 42: goto st3;
+ case 47: goto st5;
+ }
+ goto st0;
+st3:
+ if ( ++p == pe )
+ goto _test_eof3;
+case 3:
+ if ( (*p) == 42 )
+ goto st4;
+ goto st3;
+st4:
+ if ( ++p == pe )
+ goto _test_eof4;
+case 4:
+ switch( (*p) ) {
+ case 42: goto st4;
+ case 47: goto st10;
+ }
+ goto st3;
+st5:
+ if ( ++p == pe )
+ goto _test_eof5;
+case 5:
+ if ( (*p) == 10 )
+ goto st10;
+ goto st5;
+st6:
+ if ( ++p == pe )
+ goto _test_eof6;
+case 6:
+ switch( (*p) ) {
+ case 42: goto st7;
+ case 47: goto st9;
+ }
+ goto st0;
+st7:
+ if ( ++p == pe )
+ goto _test_eof7;
+case 7:
+ if ( (*p) == 42 )
+ goto st8;
+ goto st7;
+st8:
+ if ( ++p == pe )
+ goto _test_eof8;
+case 8:
+ switch( (*p) ) {
+ case 42: goto st8;
+ case 47: goto st1;
+ }
+ goto st7;
+st9:
+ if ( ++p == pe )
+ goto _test_eof9;
+case 9:
+ if ( (*p) == 10 )
+ goto st1;
+ goto st9;
+ }
+ _test_eof1: cs = 1; goto _test_eof;
+ _test_eof10: cs = 10; goto _test_eof;
+ _test_eof2: cs = 2; goto _test_eof;
+ _test_eof3: cs = 3; goto _test_eof;
+ _test_eof4: cs = 4; goto _test_eof;
+ _test_eof5: cs = 5; goto _test_eof;
+ _test_eof6: cs = 6; goto _test_eof;
+ _test_eof7: cs = 7; goto _test_eof;
+ _test_eof8: cs = 8; goto _test_eof;
+ _test_eof9: cs = 9; goto _test_eof;
+
+ _test_eof: {}
+ _out: {}
+ }
+
+#line 791 "parser.rl"
+
+ if (cs >= JSON_quirks_mode_first_final && p == pe) {
+ return result;
+ } else {
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p);
+ return Qnil;
+ }
+}
+
+/*
+ * call-seq: parse()
+ *
+ * Parses the current JSON text _source_ and returns the complete data
+ * structure as a result.
+ */
+static VALUE cParser_parse(VALUE self)
+{
+ GET_PARSER;
+
+ if (json->quirks_mode) {
+ return cParser_parse_quirks_mode(self);
+ } else {
+ return cParser_parse_strict(self);
+ }
+}
+
+
+static JSON_Parser *JSON_allocate(void)
+{
+ JSON_Parser *json = ZALLOC(JSON_Parser);
+ json->fbuffer = fbuffer_alloc(0);
+ return json;
+}
+
+static void JSON_mark(void *ptr)
+{
+ JSON_Parser *json = ptr;
+ rb_gc_mark_maybe(json->Vsource);
+ rb_gc_mark_maybe(json->create_id);
+ rb_gc_mark_maybe(json->object_class);
+ rb_gc_mark_maybe(json->array_class);
+ rb_gc_mark_maybe(json->match_string);
+}
+
+static void JSON_free(void *ptr)
+{
+ JSON_Parser *json = ptr;
+ fbuffer_free(json->fbuffer);
+ ruby_xfree(json);
+}
+
+static size_t JSON_memsize(const void *ptr)
+{
+ const JSON_Parser *json = ptr;
+ return sizeof(*json) + FBUFFER_CAPA(json->fbuffer);
+}
+
+#ifdef NEW_TYPEDDATA_WRAPPER
+static const rb_data_type_t JSON_Parser_type = {
+ "JSON/Parser",
+ {JSON_mark, JSON_free, JSON_memsize,},
+#ifdef RUBY_TYPED_FREE_IMMEDIATELY
+ 0, 0,
+ RUBY_TYPED_FREE_IMMEDIATELY,
+#endif
+};
+#endif
+
+static VALUE cJSON_parser_s_allocate(VALUE klass)
+{
+ JSON_Parser *json = JSON_allocate();
+ return TypedData_Wrap_Struct(klass, &JSON_Parser_type, json);
+}
+
+/*
+ * call-seq: source()
+ *
+ * Returns a copy of the current _source_ string that was used to construct
+ * this Parser.
+ */
+static VALUE cParser_source(VALUE self)
+{
+ GET_PARSER;
+ return rb_str_dup(json->Vsource);
+}
+
+/*
+ * call-seq: quirks_mode?()
+ *
+ * Returns true if this parser is in quirks_mode, false otherwise.
+ */
+static VALUE cParser_quirks_mode_p(VALUE self)
+{
+ GET_PARSER;
+ return json->quirks_mode ? Qtrue : Qfalse;
+}
+
+
+void Init_parser(void)
+{
+ rb_require("json/common");
+ mJSON = rb_define_module("JSON");
+ mExt = rb_define_module_under(mJSON, "Ext");
+ cParser = rb_define_class_under(mExt, "Parser", rb_cObject);
+ eParserError = rb_path2class("JSON::ParserError");
+ eNestingError = rb_path2class("JSON::NestingError");
+ rb_define_alloc_func(cParser, cJSON_parser_s_allocate);
+ rb_define_method(cParser, "initialize", cParser_initialize, -1);
+ rb_define_method(cParser, "parse", cParser_parse, 0);
+ rb_define_method(cParser, "source", cParser_source, 0);
+ rb_define_method(cParser, "quirks_mode?", cParser_quirks_mode_p, 0);
+
+ CNaN = rb_const_get(mJSON, rb_intern("NaN"));
+ CInfinity = rb_const_get(mJSON, rb_intern("Infinity"));
+ CMinusInfinity = rb_const_get(mJSON, rb_intern("MinusInfinity"));
+
+ i_json_creatable_p = rb_intern("json_creatable?");
+ i_json_create = rb_intern("json_create");
+ i_create_id = rb_intern("create_id");
+ i_create_additions = rb_intern("create_additions");
+ i_chr = rb_intern("chr");
+ i_max_nesting = rb_intern("max_nesting");
+ i_allow_nan = rb_intern("allow_nan");
+ i_symbolize_names = rb_intern("symbolize_names");
+ i_quirks_mode = rb_intern("quirks_mode");
+ i_object_class = rb_intern("object_class");
+ i_array_class = rb_intern("array_class");
+ i_match = rb_intern("match");
+ i_match_string = rb_intern("match_string");
+ i_key_p = rb_intern("key?");
+ i_deep_const_get = rb_intern("deep_const_get");
+ i_aset = rb_intern("[]=");
+ i_aref = rb_intern("[]");
+ i_leftshift = rb_intern("<<");
+#ifdef HAVE_RUBY_ENCODING_H
+ CEncoding_UTF_8 = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-8"));
+ CEncoding_UTF_16BE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-16be"));
+ CEncoding_UTF_16LE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-16le"));
+ CEncoding_UTF_32BE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-32be"));
+ CEncoding_UTF_32LE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-32le"));
+ CEncoding_ASCII_8BIT = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("ascii-8bit"));
+ i_encoding = rb_intern("encoding");
+ i_encode = rb_intern("encode");
+#else
+ i_iconv = rb_intern("iconv");
+#endif
+}
+
+/*
+ * Local variables:
+ * mode: c
+ * c-file-style: ruby
+ * indent-tabs-mode: nil
+ * End:
+ */
diff --git a/ext/json/parser/parser.h b/ext/json/parser/parser.h
new file mode 100644
index 0000000000..e98f26a297
--- /dev/null
+++ b/ext/json/parser/parser.h
@@ -0,0 +1,93 @@
+#ifndef _PARSER_H_
+#define _PARSER_H_
+
+#include "ruby.h"
+
+#ifndef HAVE_RUBY_RE_H
+#include "re.h"
+#endif
+
+#ifdef HAVE_RUBY_ST_H
+#include "ruby/st.h"
+#else
+#include "st.h"
+#endif
+
+#define option_given_p(opts, key) RTEST(rb_funcall(opts, i_key_p, 1, key))
+
+/* unicode */
+
+typedef unsigned long UTF32; /* at least 32 bits */
+typedef unsigned short UTF16; /* at least 16 bits */
+typedef unsigned char UTF8; /* typically 8 bits */
+
+#define UNI_REPLACEMENT_CHAR (UTF32)0x0000FFFD
+#define UNI_SUR_HIGH_START (UTF32)0xD800
+#define UNI_SUR_HIGH_END (UTF32)0xDBFF
+#define UNI_SUR_LOW_START (UTF32)0xDC00
+#define UNI_SUR_LOW_END (UTF32)0xDFFF
+
+typedef struct JSON_ParserStruct {
+ VALUE Vsource;
+ char *source;
+ long len;
+ char *memo;
+ VALUE create_id;
+ int max_nesting;
+ int current_nesting;
+ int allow_nan;
+ int parsing_name;
+ int symbolize_names;
+ int quirks_mode;
+ VALUE object_class;
+ VALUE array_class;
+ int create_additions;
+ VALUE match_string;
+ FBuffer *fbuffer;
+} JSON_Parser;
+
+#define GET_PARSER \
+ GET_PARSER_INIT; \
+ if (!json->Vsource) rb_raise(rb_eTypeError, "uninitialized instance")
+#define GET_PARSER_INIT \
+ JSON_Parser *json; \
+ TypedData_Get_Struct(self, JSON_Parser, &JSON_Parser_type, json)
+
+#define MinusInfinity "-Infinity"
+#define EVIL 0x666
+
+static UTF32 unescape_unicode(const unsigned char *p);
+static int convert_UTF32_to_UTF8(char *buf, UTF32 ch);
+static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result);
+static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result);
+static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result);
+static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result);
+static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result);
+static VALUE json_string_unescape(VALUE result, char *string, char *stringEnd);
+static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *result);
+static VALUE convert_encoding(VALUE source);
+static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self);
+static VALUE cParser_parse(VALUE self);
+static JSON_Parser *JSON_allocate(void);
+static void JSON_mark(void *json);
+static void JSON_free(void *json);
+static VALUE cJSON_parser_s_allocate(VALUE klass);
+static VALUE cParser_source(VALUE self);
+#ifndef ZALLOC
+#define ZALLOC(type) ((type *)ruby_zalloc(sizeof(type)))
+static inline void *ruby_zalloc(size_t n)
+{
+ void *p = ruby_xmalloc(n);
+ memset(p, 0, n);
+ return p;
+}
+#endif
+#ifdef TypedData_Wrap_Struct
+static const rb_data_type_t JSON_Parser_type;
+#define NEW_TYPEDDATA_WRAPPER 1
+#else
+#define TypedData_Wrap_Struct(klass, ignore, json) Data_Wrap_Struct(klass, JSON_mark, JSON_free, json)
+#define TypedData_Get_Struct(self, JSON_Parser, ignore, json) Data_Get_Struct(self, JSON_Parser, json)
+#endif
+
+#endif
diff --git a/ext/json/parser/parser.rl b/ext/json/parser/parser.rl
new file mode 100644
index 0000000000..b9b51aaa4b
--- /dev/null
+++ b/ext/json/parser/parser.rl
@@ -0,0 +1,945 @@
+#include "../fbuffer/fbuffer.h"
+#include "parser.h"
+
+/* unicode */
+
+static const char digit_values[256] = {
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1,
+ -1, -1, -1, -1, -1, -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1
+};
+
+static UTF32 unescape_unicode(const unsigned char *p)
+{
+ char b;
+ UTF32 result = 0;
+ b = digit_values[p[0]];
+ if (b < 0) return UNI_REPLACEMENT_CHAR;
+ result = (result << 4) | (unsigned char)b;
+ b = digit_values[p[1]];
+ if (b < 0) return UNI_REPLACEMENT_CHAR;
+ result = (result << 4) | (unsigned char)b;
+ b = digit_values[p[2]];
+ if (b < 0) return UNI_REPLACEMENT_CHAR;
+ result = (result << 4) | (unsigned char)b;
+ b = digit_values[p[3]];
+ if (b < 0) return UNI_REPLACEMENT_CHAR;
+ result = (result << 4) | (unsigned char)b;
+ return result;
+}
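digit_values is a 256-entry table indexed by a raw byte: the ASCII codes for '0'-'9', 'A'-'F' and 'a'-'f' map to 0-15 and everything else to -1, and unescape_unicode folds four such nibbles into one UTF-16 code unit, falling back to UNI_REPLACEMENT_CHAR on any non-hex byte. The same logic restated with ordinary comparisons instead of a table (hexval and unescape4 are illustrative names, not part of the library):

#include <assert.h>

/* Value of an ASCII hex digit, or -1 if the byte is not a hex digit. */
static int hexval(unsigned char c)
{
    if (c >= '0' && c <= '9') return c - '0';
    if (c >= 'A' && c <= 'F') return c - 'A' + 10;
    if (c >= 'a' && c <= 'f') return c - 'a' + 10;
    return -1;
}

/* Four hex digits -> one code unit, U+FFFD on any bad digit. */
static long unescape4(const unsigned char *p)
{
    long result = 0;
    int i;
    for (i = 0; i < 4; i++) {
        int b = hexval(p[i]);
        if (b < 0) return 0xFFFD;   /* UNI_REPLACEMENT_CHAR */
        result = (result << 4) | b;
    }
    return result;
}

int main(void)
{
    /* The escape "\u00e9" inside a JSON string denotes U+00E9. */
    assert(unescape4((const unsigned char *)"00e9") == 0xE9);
    /* A malformed escape degrades to the replacement character. */
    assert(unescape4((const unsigned char *)"00zz") == 0xFFFD);
    return 0;
}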
+
+static int convert_UTF32_to_UTF8(char *buf, UTF32 ch)
+{
+ int len = 1;
+ if (ch <= 0x7F) {
+ buf[0] = (char) ch;
+ } else if (ch <= 0x07FF) {
+ buf[0] = (char) ((ch >> 6) | 0xC0);
+ buf[1] = (char) ((ch & 0x3F) | 0x80);
+ len++;
+ } else if (ch <= 0xFFFF) {
+ buf[0] = (char) ((ch >> 12) | 0xE0);
+ buf[1] = (char) (((ch >> 6) & 0x3F) | 0x80);
+ buf[2] = (char) ((ch & 0x3F) | 0x80);
+ len += 2;
+ } else if (ch <= 0x1fffff) {
+ buf[0] =(char) ((ch >> 18) | 0xF0);
+ buf[1] =(char) (((ch >> 12) & 0x3F) | 0x80);
+ buf[2] =(char) (((ch >> 6) & 0x3F) | 0x80);
+ buf[3] =(char) ((ch & 0x3F) | 0x80);
+ len += 3;
+ } else {
+ buf[0] = '?';
+ }
+ return len;
+}
+
+#ifdef HAVE_RUBY_ENCODING_H
+static VALUE CEncoding_ASCII_8BIT, CEncoding_UTF_8, CEncoding_UTF_16BE,
+ CEncoding_UTF_16LE, CEncoding_UTF_32BE, CEncoding_UTF_32LE;
+static ID i_encoding, i_encode;
+#else
+static ID i_iconv;
+#endif
+
+static VALUE mJSON, mExt, cParser, eParserError, eNestingError;
+static VALUE CNaN, CInfinity, CMinusInfinity;
+
+static ID i_json_creatable_p, i_json_create, i_create_id, i_create_additions,
+ i_chr, i_max_nesting, i_allow_nan, i_symbolize_names, i_quirks_mode,
+ i_object_class, i_array_class, i_key_p, i_deep_const_get, i_match,
+ i_match_string, i_aset, i_aref, i_leftshift;
+
+%%{
+ machine JSON_common;
+
+ cr = '\n';
+ cr_neg = [^\n];
+ ws = [ \t\r\n];
+ c_comment = '/*' ( any* - (any* '*/' any* ) ) '*/';
+ cpp_comment = '//' cr_neg* cr;
+ comment = c_comment | cpp_comment;
+ ignore = ws | comment;
+ name_separator = ':';
+ value_separator = ',';
+ Vnull = 'null';
+ Vfalse = 'false';
+ Vtrue = 'true';
+ VNaN = 'NaN';
+ VInfinity = 'Infinity';
+ VMinusInfinity = '-Infinity';
+ begin_value = [nft\"\-\[\{NI] | digit;
+ begin_object = '{';
+ end_object = '}';
+ begin_array = '[';
+ end_array = ']';
+ begin_string = '"';
+ begin_name = begin_string;
+ begin_number = digit | '-';
+}%%
+
+%%{
+ machine JSON_object;
+ include JSON_common;
+
+ write data;
+
+ action parse_value {
+ VALUE v = Qnil;
+ char *np = JSON_parse_value(json, fpc, pe, &v);
+ if (np == NULL) {
+ fhold; fbreak;
+ } else {
+ if (NIL_P(json->object_class)) {
+ rb_hash_aset(*result, last_name, v);
+ } else {
+ rb_funcall(*result, i_aset, 2, last_name, v);
+ }
+ fexec np;
+ }
+ }
+
+ action parse_name {
+ char *np;
+ json->parsing_name = 1;
+ np = JSON_parse_string(json, fpc, pe, &last_name);
+ json->parsing_name = 0;
+ if (np == NULL) { fhold; fbreak; } else fexec np;
+ }
+
+ action exit { fhold; fbreak; }
+
+ pair = ignore* begin_name >parse_name ignore* name_separator ignore* begin_value >parse_value;
+ next_pair = ignore* value_separator pair;
+
+ main := (
+ begin_object
+ (pair (next_pair)*)? ignore*
+ end_object
+ ) @exit;
+}%%
+
+static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result)
+{
+ int cs = EVIL;
+ VALUE last_name = Qnil;
+ VALUE object_class = json->object_class;
+
+ if (json->max_nesting && json->current_nesting > json->max_nesting) {
+ rb_raise(eNestingError, "nesting of %d is too deep", json->current_nesting);
+ }
+
+ *result = NIL_P(object_class) ? rb_hash_new() : rb_class_new_instance(0, 0, object_class);
+
+ %% write init;
+ %% write exec;
+
+ if (cs >= JSON_object_first_final) {
+ if (json->create_additions) {
+ VALUE klassname;
+ if (NIL_P(json->object_class)) {
+ klassname = rb_hash_aref(*result, json->create_id);
+ } else {
+ klassname = rb_funcall(*result, i_aref, 1, json->create_id);
+ }
+ if (!NIL_P(klassname)) {
+ VALUE klass = rb_funcall(mJSON, i_deep_const_get, 1, klassname);
+ if (RTEST(rb_funcall(klass, i_json_creatable_p, 0))) {
+ *result = rb_funcall(klass, i_json_create, 1, *result);
+ }
+ }
+ }
+ return p + 1;
+ } else {
+ return NULL;
+ }
+}
+
+
+%%{
+ machine JSON_value;
+ include JSON_common;
+
+ write data;
+
+ action parse_null {
+ *result = Qnil;
+ }
+ action parse_false {
+ *result = Qfalse;
+ }
+ action parse_true {
+ *result = Qtrue;
+ }
+ action parse_nan {
+ if (json->allow_nan) {
+ *result = CNaN;
+ } else {
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p - 2);
+ }
+ }
+ action parse_infinity {
+ if (json->allow_nan) {
+ *result = CInfinity;
+ } else {
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p - 8);
+ }
+ }
+ action parse_string {
+ char *np = JSON_parse_string(json, fpc, pe, result);
+ if (np == NULL) { fhold; fbreak; } else fexec np;
+ }
+
+ action parse_number {
+ char *np;
+ if(pe > fpc + 9 - json->quirks_mode && !strncmp(MinusInfinity, fpc, 9)) {
+ if (json->allow_nan) {
+ *result = CMinusInfinity;
+ fexec p + 10;
+ fhold; fbreak;
+ } else {
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p);
+ }
+ }
+ np = JSON_parse_float(json, fpc, pe, result);
+ if (np != NULL) fexec np;
+ np = JSON_parse_integer(json, fpc, pe, result);
+ if (np != NULL) fexec np;
+ fhold; fbreak;
+ }
+
+ action parse_array {
+ char *np;
+ json->current_nesting++;
+ np = JSON_parse_array(json, fpc, pe, result);
+ json->current_nesting--;
+ if (np == NULL) { fhold; fbreak; } else fexec np;
+ }
+
+ action parse_object {
+ char *np;
+ json->current_nesting++;
+ np = JSON_parse_object(json, fpc, pe, result);
+ json->current_nesting--;
+ if (np == NULL) { fhold; fbreak; } else fexec np;
+ }
+
+ action exit { fhold; fbreak; }
+
+main := (
+ Vnull @parse_null |
+ Vfalse @parse_false |
+ Vtrue @parse_true |
+ VNaN @parse_nan |
+ VInfinity @parse_infinity |
+ begin_number >parse_number |
+ begin_string >parse_string |
+ begin_array >parse_array |
+ begin_object >parse_object
+ ) %*exit;
+}%%
+
+static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result)
+{
+ int cs = EVIL;
+
+ %% write init;
+ %% write exec;
+
+ if (cs >= JSON_value_first_final) {
+ return p;
+ } else {
+ return NULL;
+ }
+}
+
+%%{
+ machine JSON_integer;
+
+ write data;
+
+ action exit { fhold; fbreak; }
+
+ main := '-'? ('0' | [1-9][0-9]*) (^[0-9]? @exit);
+}%%
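The JSON_integer machine accepts an optional minus sign followed by either a lone '0' or a non-zero digit with any number of further digits; a '0' followed by another digit is an error, which is what makes a fixture like fail13.json ({"Numbers cannot have leading zeroes": 013}) invalid. A plain-C restatement of just that token grammar (match_json_integer is an illustrative name, not part of the library):

#include <assert.h>
#include <ctype.h>
#include <stddef.h>

/* Returns a pointer just past the matched integer token, or NULL if no
 * integer of the form '-'? ('0' | [1-9][0-9]*) starts at p. */
static const char *match_json_integer(const char *p, const char *pe)
{
    if (p < pe && *p == '-') p++;
    if (p == pe) return NULL;
    if (*p == '0') {
        p++;
        /* a leading zero must not be followed by another digit */
        if (p < pe && isdigit((unsigned char)*p)) return NULL;
        return p;
    }
    if (!isdigit((unsigned char)*p)) return NULL;
    while (p < pe && isdigit((unsigned char)*p)) p++;
    return p;
}

int main(void)
{
    const char *s;

    s = "-12,";  assert(match_json_integer(s, s + 4) == s + 3);
    s = "0";     assert(match_json_integer(s, s + 1) == s + 1);
    s = "0123";  assert(match_json_integer(s, s + 4) == NULL);  /* leading zero */
    s = "-x";    assert(match_json_integer(s, s + 2) == NULL);
    return 0;
}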
+
+static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result)
+{
+ int cs = EVIL;
+
+ %% write init;
+ json->memo = p;
+ %% write exec;
+
+ if (cs >= JSON_integer_first_final) {
+ long len = p - json->memo;
+ fbuffer_clear(json->fbuffer);
+ fbuffer_append(json->fbuffer, json->memo, len);
+ fbuffer_append_char(json->fbuffer, '\0');
+ *result = rb_cstr2inum(FBUFFER_PTR(json->fbuffer), 10);
+ return p + 1;
+ } else {
+ return NULL;
+ }
+}
+
+%%{
+ machine JSON_float;
+ include JSON_common;
+
+ write data;
+
+ action exit { fhold; fbreak; }
+
+ main := '-'? (
+ (('0' | [1-9][0-9]*) '.' [0-9]+ ([Ee] [+\-]?[0-9]+)?)
+ | (('0' | [1-9][0-9]*) ([Ee] [+\-]?[0-9]+))
+ ) (^[0-9Ee.\-]? @exit );
+}%%
+
+static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result)
+{
+ int cs = EVIL;
+
+ %% write init;
+ json->memo = p;
+ %% write exec;
+
+ if (cs >= JSON_float_first_final) {
+ long len = p - json->memo;
+ fbuffer_clear(json->fbuffer);
+ fbuffer_append(json->fbuffer, json->memo, len);
+ fbuffer_append_char(json->fbuffer, '\0');
+ *result = rb_float_new(rb_cstr_to_dbl(FBUFFER_PTR(json->fbuffer), 1));
+ return p + 1;
+ } else {
+ return NULL;
+ }
+}
+
+
+%%{
+ machine JSON_array;
+ include JSON_common;
+
+ write data;
+
+ action parse_value {
+ VALUE v = Qnil;
+ char *np = JSON_parse_value(json, fpc, pe, &v);
+ if (np == NULL) {
+ fhold; fbreak;
+ } else {
+ if (NIL_P(json->array_class)) {
+ rb_ary_push(*result, v);
+ } else {
+ rb_funcall(*result, i_leftshift, 1, v);
+ }
+ fexec np;
+ }
+ }
+
+ action exit { fhold; fbreak; }
+
+ next_element = value_separator ignore* begin_value >parse_value;
+
+ main := begin_array ignore*
+ ((begin_value >parse_value ignore*)
+ (ignore* next_element ignore*)*)?
+ end_array @exit;
+}%%
+
+static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result)
+{
+ int cs = EVIL;
+ VALUE array_class = json->array_class;
+
+ if (json->max_nesting && json->current_nesting > json->max_nesting) {
+ rb_raise(eNestingError, "nesting of %d is too deep", json->current_nesting);
+ }
+ *result = NIL_P(array_class) ? rb_ary_new() : rb_class_new_instance(0, 0, array_class);
+
+ %% write init;
+ %% write exec;
+
+ if(cs >= JSON_array_first_final) {
+ return p + 1;
+ } else {
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p);
+ return NULL;
+ }
+}
+
+static VALUE json_string_unescape(VALUE result, char *string, char *stringEnd)
+{
+ char *p = string, *pe = string, *unescape;
+ int unescape_len;
+ char buf[4];
+
+ while (pe < stringEnd) {
+ if (*pe == '\\') {
+ unescape = (char *) "?";
+ unescape_len = 1;
+ if (pe > p) rb_str_buf_cat(result, p, pe - p);
+ switch (*++pe) {
+ case 'n':
+ unescape = (char *) "\n";
+ break;
+ case 'r':
+ unescape = (char *) "\r";
+ break;
+ case 't':
+ unescape = (char *) "\t";
+ break;
+ case '"':
+ unescape = (char *) "\"";
+ break;
+ case '\\':
+ unescape = (char *) "\\";
+ break;
+ case 'b':
+ unescape = (char *) "\b";
+ break;
+ case 'f':
+ unescape = (char *) "\f";
+ break;
+ case 'u':
+ if (pe > stringEnd - 4) {
+ return Qnil;
+ } else {
+ UTF32 ch = unescape_unicode((unsigned char *) ++pe);
+ pe += 3;
+ if (UNI_SUR_HIGH_START == (ch & 0xFC00)) {
+ pe++;
+ if (pe > stringEnd - 6) return Qnil;
+ if (pe[0] == '\\' && pe[1] == 'u') {
+ UTF32 sur = unescape_unicode((unsigned char *) pe + 2);
+ ch = (((ch & 0x3F) << 10) | ((((ch >> 6) & 0xF) + 1) << 16)
+ | (sur & 0x3FF));
+ pe += 5;
+ } else {
+ unescape = (char *) "?";
+ break;
+ }
+ }
+ unescape_len = convert_UTF32_to_UTF8(buf, ch);
+ unescape = buf;
+ }
+ break;
+ default:
+ p = pe;
+ continue;
+ }
+ rb_str_buf_cat(result, unescape, unescape_len);
+ p = ++pe;
+ } else {
+ pe++;
+ }
+ }
+ rb_str_buf_cat(result, p, pe - p);
+ return result;
+}
+
+%%{
+ machine JSON_string;
+ include JSON_common;
+
+ write data;
+
+ action parse_string {
+ *result = json_string_unescape(*result, json->memo + 1, p);
+ if (NIL_P(*result)) {
+ fhold;
+ fbreak;
+ } else {
+ FORCE_UTF8(*result);
+ fexec p + 1;
+ }
+ }
+
+ action exit { fhold; fbreak; }
+
+ main := '"' ((^([\"\\] | 0..0x1f) | '\\'[\"\\/bfnrt] | '\\u'[0-9a-fA-F]{4} | '\\'^([\"\\/bfnrtu]|0..0x1f))* %parse_string) '"' @exit;
+}%%
+
+static int
+match_i(VALUE regexp, VALUE klass, VALUE memo)
+{
+ if (regexp == Qundef) return ST_STOP;
+ if (RTEST(rb_funcall(klass, i_json_creatable_p, 0)) &&
+ RTEST(rb_funcall(regexp, i_match, 1, rb_ary_entry(memo, 0)))) {
+ rb_ary_push(memo, klass);
+ return ST_STOP;
+ }
+ return ST_CONTINUE;
+}
+
+static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *result)
+{
+ int cs = EVIL;
+ VALUE match_string;
+
+ *result = rb_str_buf_new(0);
+ %% write init;
+ json->memo = p;
+ %% write exec;
+
+ if (json->create_additions && RTEST(match_string = json->match_string)) {
+ VALUE klass;
+ VALUE memo = rb_ary_new2(2);
+ rb_ary_push(memo, *result);
+ rb_hash_foreach(match_string, match_i, memo);
+ klass = rb_ary_entry(memo, 1);
+ if (RTEST(klass)) {
+ *result = rb_funcall(klass, i_json_create, 1, *result);
+ }
+ }
+
+ if (json->symbolize_names && json->parsing_name) {
+ *result = rb_str_intern(*result);
+ }
+ if (cs >= JSON_string_first_final) {
+ return p + 1;
+ } else {
+ return NULL;
+ }
+}
+
+/*
+ * Document-class: JSON::Ext::Parser
+ *
+ * This is the JSON parser implemented as a C extension. It can be selected
+ * for use by setting
+ *
+ * JSON.parser = JSON::Ext::Parser
+ *
+ * via JSON's parser= method.
+ *
+ */
+
+static VALUE convert_encoding(VALUE source)
+{
+ char *ptr = RSTRING_PTR(source);
+ long len = RSTRING_LEN(source);
+ if (len < 2) {
+ rb_raise(eParserError, "A JSON text must at least contain two octets!");
+ }
+#ifdef HAVE_RUBY_ENCODING_H
+ {
+ VALUE encoding = rb_funcall(source, i_encoding, 0);
+ if (encoding == CEncoding_ASCII_8BIT) {
+ if (len >= 4 && ptr[0] == 0 && ptr[1] == 0 && ptr[2] == 0) {
+ source = rb_funcall(source, i_encode, 2, CEncoding_UTF_8, CEncoding_UTF_32BE);
+ } else if (len >= 4 && ptr[0] == 0 && ptr[2] == 0) {
+ source = rb_funcall(source, i_encode, 2, CEncoding_UTF_8, CEncoding_UTF_16BE);
+ } else if (len >= 4 && ptr[1] == 0 && ptr[2] == 0 && ptr[3] == 0) {
+ source = rb_funcall(source, i_encode, 2, CEncoding_UTF_8, CEncoding_UTF_32LE);
+ } else if (len >= 4 && ptr[1] == 0 && ptr[3] == 0) {
+ source = rb_funcall(source, i_encode, 2, CEncoding_UTF_8, CEncoding_UTF_16LE);
+ } else {
+ source = rb_str_dup(source);
+ FORCE_UTF8(source);
+ }
+ } else {
+ source = rb_funcall(source, i_encode, 1, CEncoding_UTF_8);
+ }
+ }
+#else
+ if (len >= 4 && ptr[0] == 0 && ptr[1] == 0 && ptr[2] == 0) {
+ source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-32be"), source);
+ } else if (len >= 4 && ptr[0] == 0 && ptr[2] == 0) {
+ source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-16be"), source);
+ } else if (len >= 4 && ptr[1] == 0 && ptr[2] == 0 && ptr[3] == 0) {
+ source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-32le"), source);
+ } else if (len >= 4 && ptr[1] == 0 && ptr[3] == 0) {
+ source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-16le"), source);
+ }
+#endif
+ return source;
+}
+
+/*
+ * call-seq: new(source, opts => {})
+ *
+ * Creates a new JSON::Ext::Parser instance for the string _source_.
+ *
+ * It will be configured by the _opts_ hash. _opts_ can have the following
+ * keys:
+ * * *max_nesting*: The maximum depth of nesting allowed in the parsed data
+ * structures. Disable depth checking with :max_nesting => false|nil|0; it
+ * defaults to 100.
+ * * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in
+ * defiance of RFC 4627 to be parsed by the Parser. This option defaults to
+ * false.
+ * * *symbolize_names*: If set to true, returns symbols for the names
+ * (keys) in a JSON object. Otherwise strings are returned, which is also
+ * the default.
+ * * *create_additions*: If set to false, the Parser doesn't create
+ * additions even if a matching class and create_id was found. This option
+ * defaults to false.
+ * * *object_class*: Defaults to Hash
+ * * *array_class*: Defaults to Array
+ */
+static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self)
+{
+ VALUE source, opts;
+ GET_PARSER_INIT;
+
+ if (json->Vsource) {
+ rb_raise(rb_eTypeError, "already initialized instance");
+ }
+ rb_scan_args(argc, argv, "11", &source, &opts);
+ if (!NIL_P(opts)) {
+ opts = rb_convert_type(opts, T_HASH, "Hash", "to_hash");
+ if (NIL_P(opts)) {
+ rb_raise(rb_eArgError, "opts needs to be like a hash");
+ } else {
+ VALUE tmp = ID2SYM(i_max_nesting);
+ if (option_given_p(opts, tmp)) {
+ VALUE max_nesting = rb_hash_aref(opts, tmp);
+ if (RTEST(max_nesting)) {
+ Check_Type(max_nesting, T_FIXNUM);
+ json->max_nesting = FIX2INT(max_nesting);
+ } else {
+ json->max_nesting = 0;
+ }
+ } else {
+ json->max_nesting = 100;
+ }
+ tmp = ID2SYM(i_allow_nan);
+ if (option_given_p(opts, tmp)) {
+ json->allow_nan = RTEST(rb_hash_aref(opts, tmp)) ? 1 : 0;
+ } else {
+ json->allow_nan = 0;
+ }
+ tmp = ID2SYM(i_symbolize_names);
+ if (option_given_p(opts, tmp)) {
+ json->symbolize_names = RTEST(rb_hash_aref(opts, tmp)) ? 1 : 0;
+ } else {
+ json->symbolize_names = 0;
+ }
+ tmp = ID2SYM(i_quirks_mode);
+ if (option_given_p(opts, tmp)) {
+ VALUE quirks_mode = rb_hash_aref(opts, tmp);
+ json->quirks_mode = RTEST(quirks_mode) ? 1 : 0;
+ } else {
+ json->quirks_mode = 0;
+ }
+ tmp = ID2SYM(i_create_additions);
+ if (option_given_p(opts, tmp)) {
+ json->create_additions = RTEST(rb_hash_aref(opts, tmp));
+ } else {
+ json->create_additions = 0;
+ }
+ tmp = ID2SYM(i_create_id);
+ if (option_given_p(opts, tmp)) {
+ json->create_id = rb_hash_aref(opts, tmp);
+ } else {
+ json->create_id = rb_funcall(mJSON, i_create_id, 0);
+ }
+ tmp = ID2SYM(i_object_class);
+ if (option_given_p(opts, tmp)) {
+ json->object_class = rb_hash_aref(opts, tmp);
+ } else {
+ json->object_class = Qnil;
+ }
+ tmp = ID2SYM(i_array_class);
+ if (option_given_p(opts, tmp)) {
+ json->array_class = rb_hash_aref(opts, tmp);
+ } else {
+ json->array_class = Qnil;
+ }
+ tmp = ID2SYM(i_match_string);
+ if (option_given_p(opts, tmp)) {
+ VALUE match_string = rb_hash_aref(opts, tmp);
+ json->match_string = RTEST(match_string) ? match_string : Qnil;
+ } else {
+ json->match_string = Qnil;
+ }
+ }
+ } else {
+ json->max_nesting = 100;
+ json->allow_nan = 0;
+ json->create_additions = 1;
+ json->create_id = rb_funcall(mJSON, i_create_id, 0);
+ json->object_class = Qnil;
+ json->array_class = Qnil;
+ }
+ source = rb_convert_type(source, T_STRING, "String", "to_str");
+ if (!json->quirks_mode) {
+ source = convert_encoding(StringValue(source));
+ }
+ json->current_nesting = 0;
+ StringValue(source);
+ json->len = RSTRING_LEN(source);
+ json->source = RSTRING_PTR(source);
+ json->Vsource = source;
+ return self;
+}
+
+%%{
+ machine JSON;
+
+ write data;
+
+ include JSON_common;
+
+ action parse_object {
+ char *np;
+ json->current_nesting = 1;
+ np = JSON_parse_object(json, fpc, pe, &result);
+ if (np == NULL) { fhold; fbreak; } else fexec np;
+ }
+
+ action parse_array {
+ char *np;
+ json->current_nesting = 1;
+ np = JSON_parse_array(json, fpc, pe, &result);
+ if (np == NULL) { fhold; fbreak; } else fexec np;
+ }
+
+ main := ignore* (
+ begin_object >parse_object |
+ begin_array >parse_array
+ ) ignore*;
+}%%
+
+static VALUE cParser_parse_strict(VALUE self)
+{
+ char *p, *pe;
+ int cs = EVIL;
+ VALUE result = Qnil;
+ GET_PARSER;
+
+ %% write init;
+ p = json->source;
+ pe = p + json->len;
+ %% write exec;
+
+ if (cs >= JSON_first_final && p == pe) {
+ return result;
+ } else {
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p);
+ return Qnil;
+ }
+}
+
+
+%%{
+ machine JSON_quirks_mode;
+
+ write data;
+
+ include JSON_common;
+
+ action parse_value {
+ char *np = JSON_parse_value(json, fpc, pe, &result);
+ if (np == NULL) { fhold; fbreak; } else fexec np;
+ }
+
+ main := ignore* (
+ begin_value >parse_value
+ ) ignore*;
+}%%
+
+static VALUE cParser_parse_quirks_mode(VALUE self)
+{
+ char *p, *pe;
+ int cs = EVIL;
+ VALUE result = Qnil;
+ GET_PARSER;
+
+ %% write init;
+ p = json->source;
+ pe = p + json->len;
+ %% write exec;
+
+ if (cs >= JSON_quirks_mode_first_final && p == pe) {
+ return result;
+ } else {
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p);
+ return Qnil;
+ }
+}
+
+/*
+ * call-seq: parse()
+ *
+ * Parses the current JSON text _source_ and returns the complete data
+ * structure as a result.
+ */
+static VALUE cParser_parse(VALUE self)
+{
+ GET_PARSER;
+
+ if (json->quirks_mode) {
+ return cParser_parse_quirks_mode(self);
+ } else {
+ return cParser_parse_strict(self);
+ }
+}
+
+
+static JSON_Parser *JSON_allocate(void)
+{
+ JSON_Parser *json = ZALLOC(JSON_Parser);
+ json->fbuffer = fbuffer_alloc(0);
+ return json;
+}
+
+static void JSON_mark(void *ptr)
+{
+ JSON_Parser *json = ptr;
+ rb_gc_mark_maybe(json->Vsource);
+ rb_gc_mark_maybe(json->create_id);
+ rb_gc_mark_maybe(json->object_class);
+ rb_gc_mark_maybe(json->array_class);
+ rb_gc_mark_maybe(json->match_string);
+}
+
+static void JSON_free(void *ptr)
+{
+ JSON_Parser *json = ptr;
+ fbuffer_free(json->fbuffer);
+ ruby_xfree(json);
+}
+
+static size_t JSON_memsize(const void *ptr)
+{
+ const JSON_Parser *json = ptr;
+ return sizeof(*json) + FBUFFER_CAPA(json->fbuffer);
+}
+
+#ifdef NEW_TYPEDDATA_WRAPPER
+static const rb_data_type_t JSON_Parser_type = {
+ "JSON/Parser",
+ {JSON_mark, JSON_free, JSON_memsize,},
+#ifdef RUBY_TYPED_FREE_IMMEDIATELY
+ 0, 0,
+ RUBY_TYPED_FREE_IMMEDIATELY,
+#endif
+};
+#endif
+
+static VALUE cJSON_parser_s_allocate(VALUE klass)
+{
+ JSON_Parser *json = JSON_allocate();
+ return TypedData_Wrap_Struct(klass, &JSON_Parser_type, json);
+}
+
+/*
+ * call-seq: source()
+ *
+ * Returns a copy of the current _source_ string that was used to construct
+ * this Parser.
+ */
+static VALUE cParser_source(VALUE self)
+{
+ GET_PARSER;
+ return rb_str_dup(json->Vsource);
+}
+
+/*
+ * call-seq: quirks_mode?()
+ *
+ * Returns true if this parser is in quirks_mode, false otherwise.
+ */
+static VALUE cParser_quirks_mode_p(VALUE self)
+{
+ GET_PARSER;
+ return json->quirks_mode ? Qtrue : Qfalse;
+}
+
+
+void Init_parser(void)
+{
+ rb_require("json/common");
+ mJSON = rb_define_module("JSON");
+ mExt = rb_define_module_under(mJSON, "Ext");
+ cParser = rb_define_class_under(mExt, "Parser", rb_cObject);
+ eParserError = rb_path2class("JSON::ParserError");
+ eNestingError = rb_path2class("JSON::NestingError");
+ rb_define_alloc_func(cParser, cJSON_parser_s_allocate);
+ rb_define_method(cParser, "initialize", cParser_initialize, -1);
+ rb_define_method(cParser, "parse", cParser_parse, 0);
+ rb_define_method(cParser, "source", cParser_source, 0);
+ rb_define_method(cParser, "quirks_mode?", cParser_quirks_mode_p, 0);
+
+ CNaN = rb_const_get(mJSON, rb_intern("NaN"));
+ CInfinity = rb_const_get(mJSON, rb_intern("Infinity"));
+ CMinusInfinity = rb_const_get(mJSON, rb_intern("MinusInfinity"));
+
+ i_json_creatable_p = rb_intern("json_creatable?");
+ i_json_create = rb_intern("json_create");
+ i_create_id = rb_intern("create_id");
+ i_create_additions = rb_intern("create_additions");
+ i_chr = rb_intern("chr");
+ i_max_nesting = rb_intern("max_nesting");
+ i_allow_nan = rb_intern("allow_nan");
+ i_symbolize_names = rb_intern("symbolize_names");
+ i_quirks_mode = rb_intern("quirks_mode");
+ i_object_class = rb_intern("object_class");
+ i_array_class = rb_intern("array_class");
+ i_match = rb_intern("match");
+ i_match_string = rb_intern("match_string");
+ i_key_p = rb_intern("key?");
+ i_deep_const_get = rb_intern("deep_const_get");
+ i_aset = rb_intern("[]=");
+ i_aref = rb_intern("[]");
+ i_leftshift = rb_intern("<<");
+#ifdef HAVE_RUBY_ENCODING_H
+ CEncoding_UTF_8 = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-8"));
+ CEncoding_UTF_16BE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-16be"));
+ CEncoding_UTF_16LE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-16le"));
+ CEncoding_UTF_32BE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-32be"));
+ CEncoding_UTF_32LE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-32le"));
+ CEncoding_ASCII_8BIT = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("ascii-8bit"));
+ i_encoding = rb_intern("encoding");
+ i_encode = rb_intern("encode");
+#else
+ i_iconv = rb_intern("iconv");
+#endif
+}
+
+/*
+ * Local variables:
+ * mode: c
+ * c-file-style: ruby
+ * indent-tabs-mode: nil
+ * End:
+ */
diff --git a/ext/json/parser/prereq.mk b/ext/json/parser/prereq.mk
new file mode 100644
index 0000000000..be7bcb4319
--- /dev/null
+++ b/ext/json/parser/prereq.mk
@@ -0,0 +1,10 @@
+RAGEL = ragel
+
+.SUFFIXES: .rl
+
+.rl.c:
+ $(RAGEL) -G2 $<
+ $(BASERUBY) -pli -e '$$_.sub!(/[ \t]+$$/, "")' \
+ -e '$$_.sub!(/^static const int (JSON_.*=.*);$$/, "enum {\\1};")' $@
+
+parser.c:
diff --git a/gems/bundled_gems b/gems/bundled_gems
index 5773815bd1..4b7d0d8e7e 100644
--- a/gems/bundled_gems
+++ b/gems/bundled_gems
@@ -2,4 +2,3 @@ power_assert 0.2.2
test-unit 3.0.9
minitest 5.4.3
rake 10.4.2
-json 1.8.2
diff --git a/lib/rdoc/rubygems_hook.rb b/lib/rdoc/rubygems_hook.rb
index 5711aa1abf..c4eaddbd97 100644
--- a/lib/rdoc/rubygems_hook.rb
+++ b/lib/rdoc/rubygems_hook.rb
@@ -3,11 +3,6 @@ require 'rubygems/user_interaction'
require 'fileutils'
require 'rdoc'
-begin
- require 'json'
-rescue LoadError
-end
-
##
# Gem::RDoc provides methods to generate RDoc and ri data for installed gems
# upon gem installation.
@@ -205,7 +200,7 @@ class RDoc::RubygemsHook
@generate_ri and (@force or not File.exist? @ri_dir)
document 'darkfish', options, @rdoc_dir if
- defined?(JSON) and @generate_rdoc and (@force or not File.exist? @rdoc_dir)
+ @generate_rdoc and (@force or not File.exist? @rdoc_dir)
end
##
@@ -255,3 +250,4 @@ class RDoc::RubygemsHook
end
end
+
diff --git a/lib/rdoc/test_case.rb b/lib/rdoc/test_case.rb
index 3b2c181201..4989516b7a 100644
--- a/lib/rdoc/test_case.rb
+++ b/lib/rdoc/test_case.rb
@@ -6,11 +6,6 @@ rescue NoMethodError, Gem::LoadError
# for ruby tests
end
-begin
- require 'json'
-rescue LoadError
-end
-
require 'minitest/autorun'
require 'minitest/benchmark' if ENV['BENCHMARK']
@@ -219,3 +214,4 @@ $LOAD_PATH.each do |load_path|
break if load_path[0] == ?/
load_path.replace File.expand_path load_path
end if RUBY_VERSION < '1.9'
+
diff --git a/lib/rubygems/test_case.rb b/lib/rubygems/test_case.rb
index a116b6c93c..306edcc3f6 100644
--- a/lib/rubygems/test_case.rb
+++ b/lib/rubygems/test_case.rb
@@ -21,16 +21,7 @@ end
# them while we're testing rubygems, and thus we can't actually load them.
unless Gem::Dependency.new('rdoc', '>= 3.10').matching_specs.empty?
gem 'rdoc'
- begin
- gem 'json'
- rescue Gem::LoadError
- # for Ruby 2.3.0
- end
-end
-
-begin
- require 'json'
-rescue LoadError
+ gem 'json'
end
require 'minitest/autorun'
diff --git a/test/json/fixtures/fail1.json b/test/json/fixtures/fail1.json
new file mode 100644
index 0000000000..6216b865f1
--- /dev/null
+++ b/test/json/fixtures/fail1.json
@@ -0,0 +1 @@
+"A JSON payload should be an object or array, not a string." \ No newline at end of file
diff --git a/test/json/fixtures/fail10.json b/test/json/fixtures/fail10.json
new file mode 100644
index 0000000000..5d8c0047bd
--- /dev/null
+++ b/test/json/fixtures/fail10.json
@@ -0,0 +1 @@
+{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file
diff --git a/test/json/fixtures/fail11.json b/test/json/fixtures/fail11.json
new file mode 100644
index 0000000000..76eb95b458
--- /dev/null
+++ b/test/json/fixtures/fail11.json
@@ -0,0 +1 @@
+{"Illegal expression": 1 + 2} \ No newline at end of file
diff --git a/test/json/fixtures/fail12.json b/test/json/fixtures/fail12.json
new file mode 100644
index 0000000000..77580a4522
--- /dev/null
+++ b/test/json/fixtures/fail12.json
@@ -0,0 +1 @@
+{"Illegal invocation": alert()} \ No newline at end of file
diff --git a/test/json/fixtures/fail13.json b/test/json/fixtures/fail13.json
new file mode 100644
index 0000000000..379406b59b
--- /dev/null
+++ b/test/json/fixtures/fail13.json
@@ -0,0 +1 @@
+{"Numbers cannot have leading zeroes": 013} \ No newline at end of file
diff --git a/test/json/fixtures/fail14.json b/test/json/fixtures/fail14.json
new file mode 100644
index 0000000000..0ed366b38a
--- /dev/null
+++ b/test/json/fixtures/fail14.json
@@ -0,0 +1 @@
+{"Numbers cannot be hex": 0x14} \ No newline at end of file
diff --git a/test/json/fixtures/fail18.json b/test/json/fixtures/fail18.json
new file mode 100644
index 0000000000..ebc11eb4c2
--- /dev/null
+++ b/test/json/fixtures/fail18.json
@@ -0,0 +1 @@
+[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]
diff --git a/test/json/fixtures/fail19.json b/test/json/fixtures/fail19.json
new file mode 100644
index 0000000000..3b9c46fa9a
--- /dev/null
+++ b/test/json/fixtures/fail19.json
@@ -0,0 +1 @@
+{"Missing colon" null} \ No newline at end of file
diff --git a/test/json/fixtures/fail2.json b/test/json/fixtures/fail2.json
new file mode 100644
index 0000000000..6b7c11e5a5
--- /dev/null
+++ b/test/json/fixtures/fail2.json
@@ -0,0 +1 @@
+["Unclosed array" \ No newline at end of file
diff --git a/test/json/fixtures/fail20.json b/test/json/fixtures/fail20.json
new file mode 100644
index 0000000000..27c1af3e72
--- /dev/null
+++ b/test/json/fixtures/fail20.json
@@ -0,0 +1 @@
+{"Double colon":: null} \ No newline at end of file
diff --git a/test/json/fixtures/fail21.json b/test/json/fixtures/fail21.json
new file mode 100644
index 0000000000..62474573b2
--- /dev/null
+++ b/test/json/fixtures/fail21.json
@@ -0,0 +1 @@
+{"Comma instead of colon", null} \ No newline at end of file
diff --git a/test/json/fixtures/fail22.json b/test/json/fixtures/fail22.json
new file mode 100644
index 0000000000..a7752581bc
--- /dev/null
+++ b/test/json/fixtures/fail22.json
@@ -0,0 +1 @@
+["Colon instead of comma": false] \ No newline at end of file
diff --git a/test/json/fixtures/fail23.json b/test/json/fixtures/fail23.json
new file mode 100644
index 0000000000..494add1ca1
--- /dev/null
+++ b/test/json/fixtures/fail23.json
@@ -0,0 +1 @@
+["Bad value", truth] \ No newline at end of file
diff --git a/test/json/fixtures/fail24.json b/test/json/fixtures/fail24.json
new file mode 100644
index 0000000000..caff239bfc
--- /dev/null
+++ b/test/json/fixtures/fail24.json
@@ -0,0 +1 @@
+['single quote'] \ No newline at end of file
diff --git a/test/json/fixtures/fail25.json b/test/json/fixtures/fail25.json
new file mode 100644
index 0000000000..2dfbd259ee
--- /dev/null
+++ b/test/json/fixtures/fail25.json
@@ -0,0 +1 @@
+["tab character in string "]
diff --git a/test/json/fixtures/fail27.json b/test/json/fixtures/fail27.json
new file mode 100644
index 0000000000..6b01a2ca4a
--- /dev/null
+++ b/test/json/fixtures/fail27.json
@@ -0,0 +1,2 @@
+["line
+break"] \ No newline at end of file
diff --git a/test/json/fixtures/fail28.json b/test/json/fixtures/fail28.json
new file mode 100644
index 0000000000..621a0101c6
--- /dev/null
+++ b/test/json/fixtures/fail28.json
@@ -0,0 +1,2 @@
+["line\
+break"] \ No newline at end of file
diff --git a/test/json/fixtures/fail3.json b/test/json/fixtures/fail3.json
new file mode 100644
index 0000000000..168c81eb78
--- /dev/null
+++ b/test/json/fixtures/fail3.json
@@ -0,0 +1 @@
+{unquoted_key: "keys must be quoted"} \ No newline at end of file
diff --git a/test/json/fixtures/fail4.json b/test/json/fixtures/fail4.json
new file mode 100644
index 0000000000..9de168bf34
--- /dev/null
+++ b/test/json/fixtures/fail4.json
@@ -0,0 +1 @@
+["extra comma",] \ No newline at end of file
diff --git a/test/json/fixtures/fail5.json b/test/json/fixtures/fail5.json
new file mode 100644
index 0000000000..ddf3ce3d24
--- /dev/null
+++ b/test/json/fixtures/fail5.json
@@ -0,0 +1 @@
+["double extra comma",,] \ No newline at end of file
diff --git a/test/json/fixtures/fail6.json b/test/json/fixtures/fail6.json
new file mode 100644
index 0000000000..ed91580e1b
--- /dev/null
+++ b/test/json/fixtures/fail6.json
@@ -0,0 +1 @@
+[ , "<-- missing value"] \ No newline at end of file
diff --git a/test/json/fixtures/fail7.json b/test/json/fixtures/fail7.json
new file mode 100644
index 0000000000..8a96af3e4e
--- /dev/null
+++ b/test/json/fixtures/fail7.json
@@ -0,0 +1 @@
+["Comma after the close"], \ No newline at end of file
diff --git a/test/json/fixtures/fail8.json b/test/json/fixtures/fail8.json
new file mode 100644
index 0000000000..b28479c6ec
--- /dev/null
+++ b/test/json/fixtures/fail8.json
@@ -0,0 +1 @@
+["Extra close"]] \ No newline at end of file
diff --git a/test/json/fixtures/fail9.json b/test/json/fixtures/fail9.json
new file mode 100644
index 0000000000..5815574f36
--- /dev/null
+++ b/test/json/fixtures/fail9.json
@@ -0,0 +1 @@
+{"Extra comma": true,} \ No newline at end of file
diff --git a/test/json/fixtures/pass1.json b/test/json/fixtures/pass1.json
new file mode 100644
index 0000000000..7828fcc137
--- /dev/null
+++ b/test/json/fixtures/pass1.json
@@ -0,0 +1,56 @@
+[
+ "JSON Test Pattern pass1",
+ {"object with 1 member":["array with 1 element"]},
+ {},
+ [],
+ -42,
+ true,
+ false,
+ null,
+ {
+ "integer": 1234567890,
+ "real": -9876.543210,
+ "e": 0.123456789e-12,
+ "E": 1.234567890E+34,
+ "": 23456789012E666,
+ "zero": 0,
+ "one": 1,
+ "space": " ",
+ "quote": "\"",
+ "backslash": "\\",
+ "controls": "\b\f\n\r\t",
+ "slash": "/ & \/",
+ "alpha": "abcdefghijklmnopqrstuvwyz",
+ "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ",
+ "digit": "0123456789",
+ "special": "`1~!@#$%^&*()_+-={':[,]}|;.</>?",
+ "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A",
+ "true": true,
+ "false": false,
+ "null": null,
+ "array":[ ],
+ "object":{ },
+ "address": "50 St. James Street",
+ "url": "http://www.JSON.org/",
+ "comment": "// /* <!-- --",
+ "# -- --> */": " ",
+ " s p a c e d " :[1,2 , 3
+
+,
+
+4 , 5 , 6 ,7 ],
+ "compact": [1,2,3,4,5,6,7],
+ "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}",
+ "quotes": "&#34; \u0022 %22 0x22 034 &#x22;",
+ "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?"
+: "A key can be any string"
+ },
+ 0.5 ,98.6
+,
+99.44
+,
+
+1066
+
+
+,"rosebud"] \ No newline at end of file
diff --git a/test/json/fixtures/pass15.json b/test/json/fixtures/pass15.json
new file mode 100644
index 0000000000..fc8376b605
--- /dev/null
+++ b/test/json/fixtures/pass15.json
@@ -0,0 +1 @@
+["Illegal backslash escape: \x15"] \ No newline at end of file
diff --git a/test/json/fixtures/pass16.json b/test/json/fixtures/pass16.json
new file mode 100644
index 0000000000..c43ae3c286
--- /dev/null
+++ b/test/json/fixtures/pass16.json
@@ -0,0 +1 @@
+["Illegal backslash escape: \'"] \ No newline at end of file
diff --git a/test/json/fixtures/pass17.json b/test/json/fixtures/pass17.json
new file mode 100644
index 0000000000..62b9214aed
--- /dev/null
+++ b/test/json/fixtures/pass17.json
@@ -0,0 +1 @@
+["Illegal backslash escape: \017"] \ No newline at end of file
diff --git a/test/json/fixtures/pass2.json b/test/json/fixtures/pass2.json
new file mode 100644
index 0000000000..d3c63c7ad8
--- /dev/null
+++ b/test/json/fixtures/pass2.json
@@ -0,0 +1 @@
+[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file
diff --git a/test/json/fixtures/pass26.json b/test/json/fixtures/pass26.json
new file mode 100644
index 0000000000..845d26a6a5
--- /dev/null
+++ b/test/json/fixtures/pass26.json
@@ -0,0 +1 @@
+["tab\ character\ in\ string\ "] \ No newline at end of file
diff --git a/test/json/fixtures/pass3.json b/test/json/fixtures/pass3.json
new file mode 100644
index 0000000000..4528d51f1a
--- /dev/null
+++ b/test/json/fixtures/pass3.json
@@ -0,0 +1,6 @@
+{
+ "JSON Test Pattern pass3": {
+ "The outermost value": "must be an object or array.",
+ "In this test": "It is an object."
+ }
+}
diff --git a/test/json/setup_variant.rb b/test/json/setup_variant.rb
new file mode 100644
index 0000000000..2dab184bc4
--- /dev/null
+++ b/test/json/setup_variant.rb
@@ -0,0 +1,11 @@
+case ENV['JSON']
+when 'pure'
+ $:.unshift 'lib'
+ require 'json/pure'
+when 'ext'
+ $:.unshift 'ext', 'lib'
+ require 'json/ext'
+else
+ $:.unshift 'ext', 'lib'
+ require 'json'
+end
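
Which backend the tests load is decided once, at require time, from the JSON environment variable; a small sketch of selecting the pure-Ruby variant (paths follow the layout used by these tests):

    # select the variant before setup_variant is required
    ENV['JSON'] = 'pure'   # 'ext' for the C extension, anything else falls back to require 'json'
    require File.join(File.dirname(__FILE__), 'setup_variant')

    puts JSON.generate([1, 2, 3])   # served by whichever variant was loaded
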
diff --git a/test/json/test_json.rb b/test/json/test_json.rb
new file mode 100644
index 0000000000..7957773f23
--- /dev/null
+++ b/test/json/test_json.rb
@@ -0,0 +1,553 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+
+require 'test/unit'
+require File.join(File.dirname(__FILE__), 'setup_variant')
+require 'stringio'
+require 'tempfile'
+require 'ostruct'
+
+unless Array.method_defined?(:permutation)
+ begin
+ require 'enumerator'
+ require 'permutation'
+ class Array
+ def permutation
+ Permutation.for(self).to_enum.map { |x| x.project }
+ end
+ end
+ rescue LoadError
+ warn "Skipping permutation tests."
+ end
+end
+
+class TestJSON < Test::Unit::TestCase
+ include JSON
+
+ def setup
+ @ary = [1, "foo", 3.14, 4711.0, 2.718, nil, [1,-2,3], false, true].map do
+ |x| [x]
+ end
+ @ary_to_parse = ["1", '"foo"', "3.14", "4711.0", "2.718", "null",
+ "[1,-2,3]", "false", "true"].map do
+ |x| "[#{x}]"
+ end
+ @hash = {
+ 'a' => 2,
+ 'b' => 3.141,
+ 'c' => 'c',
+ 'd' => [ 1, "b", 3.14 ],
+ 'e' => { 'foo' => 'bar' },
+ 'g' => "\"\0\037",
+ 'h' => 1000.0,
+ 'i' => 0.001
+ }
+ @json = '{"a":2,"b":3.141,"c":"c","d":[1,"b",3.14],"e":{"foo":"bar"},'\
+ '"g":"\\"\\u0000\\u001f","h":1.0E3,"i":1.0E-3}'
+ end
+
+ def test_construction
+ parser = JSON::Parser.new('test')
+ assert_equal 'test', parser.source
+ end
+
+ def assert_equal_float(expected, is)
+ assert_in_delta(expected.first, is.first, 1e-2)
+ end
+
+ def test_parse_simple_arrays
+ assert_equal([], parse('[]'))
+ assert_equal([], parse(' [ ] '))
+ assert_equal([nil], parse('[null]'))
+ assert_equal([false], parse('[false]'))
+ assert_equal([true], parse('[true]'))
+ assert_equal([-23], parse('[-23]'))
+ assert_equal([23], parse('[23]'))
+ assert_equal([0.23], parse('[0.23]'))
+ assert_equal([0.0], parse('[0e0]'))
+ assert_raises(JSON::ParserError) { parse('[+23.2]') }
+ assert_raises(JSON::ParserError) { parse('[+23]') }
+ assert_raises(JSON::ParserError) { parse('[.23]') }
+ assert_raises(JSON::ParserError) { parse('[023]') }
+ assert_equal_float [3.141], parse('[3.141]')
+ assert_equal_float [-3.141], parse('[-3.141]')
+ assert_equal_float [3.141], parse('[3141e-3]')
+ assert_equal_float [3.141], parse('[3141.1e-3]')
+ assert_equal_float [3.141], parse('[3141E-3]')
+ assert_equal_float [3.141], parse('[3141.0E-3]')
+ assert_equal_float [-3.141], parse('[-3141.0e-3]')
+ assert_equal_float [-3.141], parse('[-3141e-3]')
+ assert_raises(ParserError) { parse('[NaN]') }
+ assert parse('[NaN]', :allow_nan => true).first.nan?
+ assert_raises(ParserError) { parse('[Infinity]') }
+ assert_equal [1.0/0], parse('[Infinity]', :allow_nan => true)
+ assert_raises(ParserError) { parse('[-Infinity]') }
+ assert_equal [-1.0/0], parse('[-Infinity]', :allow_nan => true)
+ assert_equal([""], parse('[""]'))
+ assert_equal(["foobar"], parse('["foobar"]'))
+ assert_equal([{}], parse('[{}]'))
+ end
+
+ def test_parse_simple_objects
+ assert_equal({}, parse('{}'))
+ assert_equal({}, parse(' { } '))
+ assert_equal({ "a" => nil }, parse('{ "a" : null}'))
+ assert_equal({ "a" => nil }, parse('{"a":null}'))
+ assert_equal({ "a" => false }, parse('{ "a" : false } '))
+ assert_equal({ "a" => false }, parse('{"a":false}'))
+ assert_raises(JSON::ParserError) { parse('{false}') }
+ assert_equal({ "a" => true }, parse('{"a":true}'))
+ assert_equal({ "a" => true }, parse(' { "a" : true } '))
+ assert_equal({ "a" => -23 }, parse(' { "a" : -23 } '))
+ assert_equal({ "a" => -23 }, parse(' { "a" : -23 } '))
+ assert_equal({ "a" => 23 }, parse('{"a":23 } '))
+ assert_equal({ "a" => 23 }, parse(' { "a" : 23 } '))
+ assert_equal({ "a" => 0.23 }, parse(' { "a" : 0.23 } '))
+ assert_equal({ "a" => 0.23 }, parse(' { "a" : 0.23 } '))
+ end
+
+ def test_parse_json_primitive_values
+ assert_raise(JSON::ParserError) { JSON.parse('') }
+ assert_raise(JSON::ParserError) { JSON.parse('', :quirks_mode => true) }
+ assert_raise(TypeError) { JSON::Parser.new(nil).parse }
+ assert_raise(TypeError) { JSON::Parser.new(nil, :quirks_mode => true).parse }
+ assert_raise(TypeError) { JSON.parse(nil) }
+ assert_raise(TypeError) { JSON.parse(nil, :quirks_mode => true) }
+ assert_raise(JSON::ParserError) { JSON.parse(' /* foo */ ') }
+ assert_raise(JSON::ParserError) { JSON.parse(' /* foo */ ', :quirks_mode => true) }
+ parser = JSON::Parser.new('null')
+ assert_equal false, parser.quirks_mode?
+ assert_raise(JSON::ParserError) { parser.parse }
+ assert_raise(JSON::ParserError) { JSON.parse('null') }
+ assert_equal nil, JSON.parse('null', :quirks_mode => true)
+ parser = JSON::Parser.new('null', :quirks_mode => true)
+ assert_equal true, parser.quirks_mode?
+ assert_equal nil, parser.parse
+ assert_raise(JSON::ParserError) { JSON.parse('false') }
+ assert_equal false, JSON.parse('false', :quirks_mode => true)
+ assert_raise(JSON::ParserError) { JSON.parse('true') }
+ assert_equal true, JSON.parse('true', :quirks_mode => true)
+ assert_raise(JSON::ParserError) { JSON.parse('23') }
+ assert_equal 23, JSON.parse('23', :quirks_mode => true)
+ assert_raise(JSON::ParserError) { JSON.parse('1') }
+ assert_equal 1, JSON.parse('1', :quirks_mode => true)
+ assert_raise(JSON::ParserError) { JSON.parse('3.141') }
+ assert_in_delta 3.141, JSON.parse('3.141', :quirks_mode => true), 1E-3
+ assert_raise(JSON::ParserError) { JSON.parse('18446744073709551616') }
+ assert_equal 2 ** 64, JSON.parse('18446744073709551616', :quirks_mode => true)
+ assert_raise(JSON::ParserError) { JSON.parse('"foo"') }
+ assert_equal 'foo', JSON.parse('"foo"', :quirks_mode => true)
+ assert_raise(JSON::ParserError) { JSON.parse('NaN', :allow_nan => true) }
+ assert JSON.parse('NaN', :quirks_mode => true, :allow_nan => true).nan?
+ assert_raise(JSON::ParserError) { JSON.parse('Infinity', :allow_nan => true) }
+ assert JSON.parse('Infinity', :quirks_mode => true, :allow_nan => true).infinite?
+ assert_raise(JSON::ParserError) { JSON.parse('-Infinity', :allow_nan => true) }
+ assert JSON.parse('-Infinity', :quirks_mode => true, :allow_nan => true).infinite?
+ assert_raise(JSON::ParserError) { JSON.parse('[ 1, ]', :quirks_mode => true) }
+ end
+
+ if Array.method_defined?(:permutation)
+ def test_parse_more_complex_arrays
+ a = [ nil, false, true, "foßbar", [ "n€st€d", true ], { "nested" => true, "n€ßt€ð2" => {} }]
+ a.permutation.each do |perm|
+ json = pretty_generate(perm)
+ assert_equal perm, parse(json)
+ end
+ end
+
+ def test_parse_complex_objects
+ a = [ nil, false, true, "foßbar", [ "n€st€d", true ], { "nested" => true, "n€ßt€ð2" => {} }]
+ a.permutation.each do |perm|
+ s = "a"
+ orig_obj = perm.inject({}) { |h, x| h[s.dup] = x; s = s.succ; h }
+ json = pretty_generate(orig_obj)
+ assert_equal orig_obj, parse(json)
+ end
+ end
+ end
+
+ def test_parse_arrays
+ assert_equal([1,2,3], parse('[1,2,3]'))
+ assert_equal([1.2,2,3], parse('[1.2,2,3]'))
+ assert_equal([[],[[],[]]], parse('[[],[[],[]]]'))
+ end
+
+ def test_parse_values
+ assert_equal([""], parse('[""]'))
+ assert_equal(["\\"], parse('["\\\\"]'))
+ assert_equal(['"'], parse('["\""]'))
+ assert_equal(['\\"\\'], parse('["\\\\\\"\\\\"]'))
+ assert_equal(["\"\b\n\r\t\0\037"],
+ parse('["\"\b\n\r\t\u0000\u001f"]'))
+ for i in 0 ... @ary.size
+ assert_equal(@ary[i], parse(@ary_to_parse[i]))
+ end
+ end
+
+ def test_parse_array
+ assert_equal([], parse('[]'))
+ assert_equal([], parse(' [ ] '))
+ assert_equal([1], parse('[1]'))
+ assert_equal([1], parse(' [ 1 ] '))
+ assert_equal(@ary,
+ parse('[[1],["foo"],[3.14],[47.11e+2],[2718.0E-3],[null],[[1,-2,3]]'\
+ ',[false],[true]]'))
+ assert_equal(@ary, parse(%Q{ [ [1] , ["foo"] , [3.14] \t , [47.11e+2]\s
+ , [2718.0E-3 ],\r[ null] , [[1, -2, 3 ]], [false ],[ true]\n ] }))
+ end
+
+ class SubArray < Array
+ def <<(v)
+ @shifted = true
+ super
+ end
+
+ def shifted?
+ @shifted
+ end
+ end
+
+ class SubArray2 < Array
+ def to_json(*a)
+ {
+ JSON.create_id => self.class.name,
+ 'ary' => to_a,
+ }.to_json(*a)
+ end
+
+ def self.json_create(o)
+ o.delete JSON.create_id
+ o['ary']
+ end
+ end
+
+ class SubArrayWrapper
+ def initialize
+ @data = []
+ end
+
+ attr_reader :data
+
+ def [](index)
+ @data[index]
+ end
+
+ def <<(value)
+ @data << value
+ @shifted = true
+ end
+
+ def shifted?
+ @shifted
+ end
+ end
+
+ def test_parse_array_custom_array_derived_class
+ res = parse('[1,2]', :array_class => SubArray)
+ assert_equal([1,2], res)
+ assert_equal(SubArray, res.class)
+ assert res.shifted?
+ end
+
+ def test_parse_array_custom_non_array_derived_class
+ res = parse('[1,2]', :array_class => SubArrayWrapper)
+ assert_equal([1,2], res.data)
+ assert_equal(SubArrayWrapper, res.class)
+ assert res.shifted?
+ end
+
+ def test_parse_object
+ assert_equal({}, parse('{}'))
+ assert_equal({}, parse(' { } '))
+ assert_equal({'foo'=>'bar'}, parse('{"foo":"bar"}'))
+ assert_equal({'foo'=>'bar'}, parse(' { "foo" : "bar" } '))
+ end
+
+ class SubHash < Hash
+ def []=(k, v)
+ @item_set = true
+ super
+ end
+
+ def item_set?
+ @item_set
+ end
+ end
+
+ class SubHash2 < Hash
+ def to_json(*a)
+ {
+ JSON.create_id => self.class.name,
+ }.merge(self).to_json(*a)
+ end
+
+ def self.json_create(o)
+ o.delete JSON.create_id
+ self[o]
+ end
+ end
+
+ class SubOpenStruct < OpenStruct
+ def [](k)
+ __send__(k)
+ end
+
+ def []=(k, v)
+ @item_set = true
+ __send__("#{k}=", v)
+ end
+
+ def item_set?
+ @item_set
+ end
+ end
+
+ def test_parse_object_custom_hash_derived_class
+ res = parse('{"foo":"bar"}', :object_class => SubHash)
+ assert_equal({"foo" => "bar"}, res)
+ assert_equal(SubHash, res.class)
+ assert res.item_set?
+ end
+
+ def test_parse_object_custom_non_hash_derived_class
+ res = parse('{"foo":"bar"}', :object_class => SubOpenStruct)
+ assert_equal "bar", res.foo
+ assert_equal(SubOpenStruct, res.class)
+ assert res.item_set?
+ end
+
+ def test_parse_generic_object
+ res = parse('{"foo":"bar", "baz":{}}', :object_class => JSON::GenericObject)
+ assert_equal(JSON::GenericObject, res.class)
+ assert_equal "bar", res.foo
+ assert_equal "bar", res["foo"]
+ assert_equal "bar", res[:foo]
+ assert_equal "bar", res.to_hash[:foo]
+ assert_equal(JSON::GenericObject, res.baz.class)
+ end
+
+ def test_generate_core_subclasses_with_new_to_json
+ obj = SubHash2["foo" => SubHash2["bar" => true]]
+ obj_json = JSON(obj)
+ obj_again = JSON.parse(obj_json, :create_additions => true)
+ assert_kind_of SubHash2, obj_again
+ assert_kind_of SubHash2, obj_again['foo']
+ assert obj_again['foo']['bar']
+ assert_equal obj, obj_again
+ assert_equal ["foo"], JSON(JSON(SubArray2["foo"]), :create_additions => true)
+ end
+
+ def test_generate_core_subclasses_with_default_to_json
+ assert_equal '{"foo":"bar"}', JSON(SubHash["foo" => "bar"])
+ assert_equal '["foo"]', JSON(SubArray["foo"])
+ end
+
+ def test_generate_of_core_subclasses
+ obj = SubHash["foo" => SubHash["bar" => true]]
+ obj_json = JSON(obj)
+ obj_again = JSON(obj_json)
+ assert_kind_of Hash, obj_again
+ assert_kind_of Hash, obj_again['foo']
+ assert obj_again['foo']['bar']
+ assert_equal obj, obj_again
+ end
+
+ def test_parser_reset
+ parser = Parser.new(@json)
+ assert_equal(@hash, parser.parse)
+ assert_equal(@hash, parser.parse)
+ end
+
+ def test_comments
+ json = <<EOT
+{
+ "key1":"value1", // eol comment
+ "key2":"value2" /* multi line
+ * comment */,
+ "key3":"value3" /* multi line
+ // nested eol comment
+ * comment */
+}
+EOT
+ assert_equal(
+ { "key1" => "value1", "key2" => "value2", "key3" => "value3" },
+ parse(json))
+ json = <<EOT
+{
+ "key1":"value1" /* multi line
+ // nested eol comment
+ /* illegal nested multi line comment */
+ * comment */
+}
+EOT
+ assert_raises(ParserError) { parse(json) }
+ json = <<EOT
+{
+ "key1":"value1" /* multi line
+ // nested eol comment
+ closed multi comment */
+ and again, throw an Error */
+}
+EOT
+ assert_raises(ParserError) { parse(json) }
+ json = <<EOT
+{
+ "key1":"value1" /*/*/
+}
+EOT
+ assert_equal({ "key1" => "value1" }, parse(json))
+ end
+
+ def test_backslash
+ data = [ '\\.(?i:gif|jpe?g|png)$' ]
+ json = '["\\\\.(?i:gif|jpe?g|png)$"]'
+ assert_equal json, JSON.generate(data)
+ assert_equal data, JSON.parse(json)
+ #
+ data = [ '\\"' ]
+ json = '["\\\\\""]'
+ assert_equal json, JSON.generate(data)
+ assert_equal data, JSON.parse(json)
+ #
+ json = '["/"]'
+ data = JSON.parse(json)
+ assert_equal ['/'], data
+ assert_equal json, JSON.generate(data)
+ #
+ json = '["\""]'
+ data = JSON.parse(json)
+ assert_equal ['"'], data
+ assert_equal json, JSON.generate(data)
+ json = '["\\\'"]'
+ data = JSON.parse(json)
+ assert_equal ["'"], data
+ assert_equal '["\'"]', JSON.generate(data)
+ end
+
+ def test_wrong_inputs
+ assert_raises(ParserError) { JSON.parse('"foo"') }
+ assert_raises(ParserError) { JSON.parse('123') }
+ assert_raises(ParserError) { JSON.parse('[] bla') }
+ assert_raises(ParserError) { JSON.parse('[] 1') }
+ assert_raises(ParserError) { JSON.parse('[] []') }
+ assert_raises(ParserError) { JSON.parse('[] {}') }
+ assert_raises(ParserError) { JSON.parse('{} []') }
+ assert_raises(ParserError) { JSON.parse('{} {}') }
+ assert_raises(ParserError) { JSON.parse('[NULL]') }
+ assert_raises(ParserError) { JSON.parse('[FALSE]') }
+ assert_raises(ParserError) { JSON.parse('[TRUE]') }
+ assert_raises(ParserError) { JSON.parse('[07] ') }
+ assert_raises(ParserError) { JSON.parse('[0a]') }
+ assert_raises(ParserError) { JSON.parse('[1.]') }
+ assert_raises(ParserError) { JSON.parse(' ') }
+ end
+
+ def test_nesting
+ assert_raises(JSON::NestingError) { JSON.parse '[[]]', :max_nesting => 1 }
+ assert_raises(JSON::NestingError) { JSON.parser.new('[[]]', :max_nesting => 1).parse }
+ assert_equal [[]], JSON.parse('[[]]', :max_nesting => 2)
+ too_deep = '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]'
+ too_deep_ary = eval too_deep
+ assert_raises(JSON::NestingError) { JSON.parse too_deep }
+ assert_raises(JSON::NestingError) { JSON.parser.new(too_deep).parse }
+ assert_raises(JSON::NestingError) { JSON.parse too_deep, :max_nesting => 100 }
+ ok = JSON.parse too_deep, :max_nesting => 101
+ assert_equal too_deep_ary, ok
+ ok = JSON.parse too_deep, :max_nesting => nil
+ assert_equal too_deep_ary, ok
+ ok = JSON.parse too_deep, :max_nesting => false
+ assert_equal too_deep_ary, ok
+ ok = JSON.parse too_deep, :max_nesting => 0
+ assert_equal too_deep_ary, ok
+ assert_raises(JSON::NestingError) { JSON.generate [[]], :max_nesting => 1 }
+ assert_equal '[[]]', JSON.generate([[]], :max_nesting => 2)
+ assert_raises(JSON::NestingError) { JSON.generate too_deep_ary }
+ assert_raises(JSON::NestingError) { JSON.generate too_deep_ary, :max_nesting => 100 }
+ ok = JSON.generate too_deep_ary, :max_nesting => 101
+ assert_equal too_deep, ok
+ ok = JSON.generate too_deep_ary, :max_nesting => nil
+ assert_equal too_deep, ok
+ ok = JSON.generate too_deep_ary, :max_nesting => false
+ assert_equal too_deep, ok
+ ok = JSON.generate too_deep_ary, :max_nesting => 0
+ assert_equal too_deep, ok
+ end
+
+ def test_symbolize_names
+ assert_equal({ "foo" => "bar", "baz" => "quux" },
+ JSON.parse('{"foo":"bar", "baz":"quux"}'))
+ assert_equal({ :foo => "bar", :baz => "quux" },
+ JSON.parse('{"foo":"bar", "baz":"quux"}', :symbolize_names => true))
+ end
+
+ def test_load
+ assert_equal @hash, JSON.load(@json)
+ tempfile = Tempfile.open('json')
+ tempfile.write @json
+ tempfile.rewind
+ assert_equal @hash, JSON.load(tempfile)
+ stringio = StringIO.new(@json)
+ stringio.rewind
+ assert_equal @hash, JSON.load(stringio)
+ assert_equal nil, JSON.load(nil)
+ assert_equal nil, JSON.load('')
+ ensure
+ tempfile.close!
+ end
+
+ def test_load_with_options
+ small_hash = JSON("foo" => 'bar')
+ symbol_hash = { :foo => 'bar' }
+ assert_equal symbol_hash, JSON.load(small_hash, nil, :symbolize_names => true)
+ end
+
+ def test_dump
+ too_deep = '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]'
+ assert_equal too_deep, JSON.dump(eval(too_deep))
+ assert_kind_of String, Marshal.dump(eval(too_deep))
+ assert_raises(ArgumentError) { JSON.dump(eval(too_deep), 100) }
+ assert_raises(ArgumentError) { Marshal.dump(eval(too_deep), 100) }
+ assert_equal too_deep, JSON.dump(eval(too_deep), 101)
+ assert_kind_of String, Marshal.dump(eval(too_deep), 101)
+ output = StringIO.new
+ JSON.dump(eval(too_deep), output)
+ assert_equal too_deep, output.string
+ output = StringIO.new
+ JSON.dump(eval(too_deep), output, 101)
+ assert_equal too_deep, output.string
+ end
+
+ def test_dump_should_modify_defaults
+ max_nesting = JSON.dump_default_options[:max_nesting]
+ JSON.dump([], StringIO.new, 10)
+ assert_equal max_nesting, JSON.dump_default_options[:max_nesting]
+ end
+
+ def test_big_integers
+ json1 = JSON([orig = (1 << 31) - 1])
+ assert_equal orig, JSON[json1][0]
+ json2 = JSON([orig = 1 << 31])
+ assert_equal orig, JSON[json2][0]
+ json3 = JSON([orig = (1 << 62) - 1])
+ assert_equal orig, JSON[json3][0]
+ json4 = JSON([orig = 1 << 62])
+ assert_equal orig, JSON[json4][0]
+ json5 = JSON([orig = 1 << 64])
+ assert_equal orig, JSON[json5][0]
+ end
+
+ if defined?(JSON::Ext::Parser)
+ def test_allocate
+ parser = JSON::Ext::Parser.new("{}")
+ assert_raise(TypeError, '[ruby-core:35079]') {parser.__send__(:initialize, "{}")}
+ parser = JSON::Ext::Parser.allocate
+ assert_raise(TypeError, '[ruby-core:35079]') {parser.source}
+ end
+ end
+
+ def test_argument_encoding
+ source = "{}".force_encoding("ascii-8bit")
+ JSON::Parser.new(source)
+ assert_equal Encoding::ASCII_8BIT, source.encoding
+ end if defined?(Encoding::ASCII_8BIT)
+end
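
As test_parse_json_primitive_values above documents, the parser reinstated by this revert rejects a bare primitive at the top level unless :quirks_mode is set; condensed into a short sketch (behaviour of this json version, not of later releases):

    require 'json'

    begin
      JSON.parse('23')                        # strict: top level must be an object or array
    rescue JSON::ParserError => e
      puts "rejected: #{e.class}"
    end

    p JSON.parse('23',    :quirks_mode => true)   # => 23
    p JSON.parse('"foo"', :quirks_mode => true)   # => "foo"
    p JSON.parse('null',  :quirks_mode => true)   # => nil
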
diff --git a/test/json/test_json_addition.rb b/test/json/test_json_addition.rb
new file mode 100644
index 0000000000..a30f06addd
--- /dev/null
+++ b/test/json/test_json_addition.rb
@@ -0,0 +1,196 @@
+#!/usr/bin/env ruby
+# -*- coding:utf-8 -*-
+
+require 'test/unit'
+require File.join(File.dirname(__FILE__), 'setup_variant')
+require 'json/add/core'
+require 'json/add/complex'
+require 'json/add/rational'
+require 'json/add/bigdecimal'
+require 'json/add/ostruct'
+require 'date'
+
+class TestJSONAddition < Test::Unit::TestCase
+ include JSON
+
+ class A
+ def initialize(a)
+ @a = a
+ end
+
+ attr_reader :a
+
+ def ==(other)
+ a == other.a
+ end
+
+ def self.json_create(object)
+ new(*object['args'])
+ end
+
+ def to_json(*args)
+ {
+ 'json_class' => self.class.name,
+ 'args' => [ @a ],
+ }.to_json(*args)
+ end
+ end
+
+ class A2 < A
+ def to_json(*args)
+ {
+ 'json_class' => self.class.name,
+ 'args' => [ @a ],
+ }.to_json(*args)
+ end
+ end
+
+ class B
+ def self.json_creatable?
+ false
+ end
+
+ def to_json(*args)
+ {
+ 'json_class' => self.class.name,
+ }.to_json(*args)
+ end
+ end
+
+ class C
+ def self.json_creatable?
+ false
+ end
+
+ def to_json(*args)
+ {
+ 'json_class' => 'TestJSONAddition::Nix',
+ }.to_json(*args)
+ end
+ end
+
+ def test_extended_json
+ a = A.new(666)
+ assert A.json_creatable?
+ json = generate(a)
+ a_again = JSON.parse(json, :create_additions => true)
+ assert_kind_of a.class, a_again
+ assert_equal a, a_again
+ end
+
+ def test_extended_json_default
+ a = A.new(666)
+ assert A.json_creatable?
+ json = generate(a)
+ a_hash = JSON.parse(json)
+ assert_kind_of Hash, a_hash
+ end
+
+ def test_extended_json_disabled
+ a = A.new(666)
+ assert A.json_creatable?
+ json = generate(a)
+ a_again = JSON.parse(json, :create_additions => true)
+ assert_kind_of a.class, a_again
+ assert_equal a, a_again
+ a_hash = JSON.parse(json, :create_additions => false)
+ assert_kind_of Hash, a_hash
+ assert_equal(
+ {"args"=>[666], "json_class"=>"TestJSONAddition::A"}.sort_by { |k,| k },
+ a_hash.sort_by { |k,| k }
+ )
+ end
+
+ def test_extended_json_fail1
+ b = B.new
+ assert !B.json_creatable?
+ json = generate(b)
+ assert_equal({ "json_class"=>"TestJSONAddition::B" }, JSON.parse(json))
+ end
+
+ def test_extended_json_fail2
+ c = C.new
+ assert !C.json_creatable?
+ json = generate(c)
+ assert_raises(ArgumentError, NameError) { JSON.parse(json, :create_additions => true) }
+ end
+
+ def test_raw_strings
+ raw = ''
+ raw.respond_to?(:encode!) and raw.encode!(Encoding::ASCII_8BIT)
+ raw_array = []
+ for i in 0..255
+ raw << i
+ raw_array << i
+ end
+ json = raw.to_json_raw
+ json_raw_object = raw.to_json_raw_object
+ hash = { 'json_class' => 'String', 'raw'=> raw_array }
+ assert_equal hash, json_raw_object
+ assert_match(/\A\{.*\}\z/, json)
+ assert_match(/"json_class":"String"/, json)
+ assert_match(/"raw":\[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255\]/, json)
+ raw_again = JSON.parse(json, :create_additions => true)
+ assert_equal raw, raw_again
+ end
+
+ MyJsonStruct = Struct.new 'MyJsonStruct', :foo, :bar
+
+ def test_core
+ t = Time.now
+ assert_equal t, JSON(JSON(t), :create_additions => true)
+ d = Date.today
+ assert_equal d, JSON(JSON(d), :create_additions => true)
+ d = DateTime.civil(2007, 6, 14, 14, 57, 10, Rational(1, 12), 2299161)
+ assert_equal d, JSON(JSON(d), :create_additions => true)
+ assert_equal 1..10, JSON(JSON(1..10), :create_additions => true)
+ assert_equal 1...10, JSON(JSON(1...10), :create_additions => true)
+ assert_equal "a".."c", JSON(JSON("a".."c"), :create_additions => true)
+ assert_equal "a"..."c", JSON(JSON("a"..."c"), :create_additions => true)
+ s = MyJsonStruct.new 4711, 'foot'
+ assert_equal s, JSON(JSON(s), :create_additions => true)
+ struct = Struct.new :foo, :bar
+ s = struct.new 4711, 'foot'
+ assert_raises(JSONError) { JSON(s) }
+ begin
+ raise TypeError, "test me"
+ rescue TypeError => e
+ e_json = JSON.generate e
+ e_again = JSON e_json, :create_additions => true
+ assert_kind_of TypeError, e_again
+ assert_equal e.message, e_again.message
+ assert_equal e.backtrace, e_again.backtrace
+ end
+ assert_equal(/foo/, JSON(JSON(/foo/), :create_additions => true))
+ assert_equal(/foo/i, JSON(JSON(/foo/i), :create_additions => true))
+ end
+
+ def test_utc_datetime
+ now = Time.now
+ d = DateTime.parse(now.to_s, :create_additions => true) # usual case
+ assert_equal d, JSON.parse(d.to_json, :create_additions => true)
+ d = DateTime.parse(now.utc.to_s) # of = 0
+ assert_equal d, JSON.parse(d.to_json, :create_additions => true)
+ d = DateTime.civil(2008, 6, 17, 11, 48, 32, Rational(1,24))
+ assert_equal d, JSON.parse(d.to_json, :create_additions => true)
+ d = DateTime.civil(2008, 6, 17, 11, 48, 32, Rational(12,24))
+ assert_equal d, JSON.parse(d.to_json, :create_additions => true)
+ end
+
+ def test_rational_complex
+ assert_equal Rational(2, 9), JSON.parse(JSON(Rational(2, 9)), :create_additions => true)
+ assert_equal Complex(2, 9), JSON.parse(JSON(Complex(2, 9)), :create_additions => true)
+ end
+
+ def test_bigdecimal
+ assert_equal BigDecimal('3.141', 23), JSON(JSON(BigDecimal('3.141', 23)), :create_additions => true)
+ assert_equal BigDecimal('3.141', 666), JSON(JSON(BigDecimal('3.141', 666)), :create_additions => true)
+ end
+
+ def test_ostruct
+ o = OpenStruct.new
+ # XXX this won't work; o.foo = { :bar => true }
+ o.foo = { 'bar' => true }
+ assert_equal o, JSON.parse(JSON(o), :create_additions => true)
+ end
+end
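
The additions machinery these tests cover follows one convention: to_json embeds the class name under JSON.create_id, and parsing with :create_additions => true hands the decoded hash to that class's json_create hook. A self-contained round trip with an illustrative class of its own:

    require 'json'

    class Point
      attr_reader :x, :y

      def initialize(x, y)
        @x, @y = x, y
      end

      def ==(other)
        other.is_a?(Point) && x == other.x && y == other.y
      end

      def to_json(*args)
        { JSON.create_id => self.class.name, 'args' => [x, y] }.to_json(*args)
      end

      def self.json_create(object)
        new(*object['args'])
      end
    end

    point = Point.new(1, 2)
    json  = JSON.generate(point)                            # {"json_class":"Point","args":[1,2]}
    p JSON.parse(json, :create_additions => true) == point  # => true
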
diff --git a/test/json/test_json_encoding.rb b/test/json/test_json_encoding.rb
new file mode 100644
index 0000000000..fa7d878920
--- /dev/null
+++ b/test/json/test_json_encoding.rb
@@ -0,0 +1,65 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+
+require 'test/unit'
+require File.join(File.dirname(__FILE__), 'setup_variant')
+
+class TestJSONEncoding < Test::Unit::TestCase
+ include JSON
+
+ def setup
+ @utf_8 = '["© ≠ €!"]'
+ @parsed = [ "© ≠ €!" ]
+ @generated = '["\u00a9 \u2260 \u20ac!"]'
+ if String.method_defined?(:encode)
+ @utf_16_data = [@parsed.first.encode('utf-16be', 'utf-8')]
+ @utf_8_ascii_8bit = @utf_8.dup.force_encoding(Encoding::ASCII_8BIT)
+ @utf_16be = @utf_8.encode('utf-16be', 'utf-8')
+ @utf_16be_ascii_8bit = @utf_16be.dup.force_encoding(Encoding::ASCII_8BIT)
+ @utf_16le = @utf_8.encode('utf-16le', 'utf-8')
+ @utf_16le_ascii_8bit = @utf_16le.dup.force_encoding(Encoding::ASCII_8BIT)
+ @utf_32be = @utf_8.encode('utf-32be', 'utf-8')
+ @utf_32be_ascii_8bit = @utf_32be.dup.force_encoding(Encoding::ASCII_8BIT)
+ @utf_32le = @utf_8.encode('utf-32le', 'utf-8')
+ @utf_32le_ascii_8bit = @utf_32le.dup.force_encoding(Encoding::ASCII_8BIT)
+ else
+ require 'iconv'
+ @utf_16_data = Iconv.iconv('utf-16be', 'utf-8', @parsed.first)
+ @utf_8_ascii_8bit = @utf_8.dup
+ @utf_16be, = Iconv.iconv('utf-16be', 'utf-8', @utf_8)
+ @utf_16be_ascii_8bit = @utf_16be.dup
+ @utf_16le, = Iconv.iconv('utf-16le', 'utf-8', @utf_8)
+ @utf_16le_ascii_8bit = @utf_16le.dup
+ @utf_32be, = Iconv.iconv('utf-32be', 'utf-8', @utf_8)
+ @utf_32be_ascii_8bit = @utf_32be.dup
+ @utf_32le, = Iconv.iconv('utf-32le', 'utf-8', @utf_8)
+ @utf_32le_ascii_8bit = @utf_32le.dup
+ end
+ end
+
+ def test_parse
+ assert_equal @parsed, JSON.parse(@utf_8)
+ assert_equal @parsed, JSON.parse(@utf_16be)
+ assert_equal @parsed, JSON.parse(@utf_16le)
+ assert_equal @parsed, JSON.parse(@utf_32be)
+ assert_equal @parsed, JSON.parse(@utf_32le)
+ end
+
+ def test_parse_ascii_8bit
+ assert_equal @parsed, JSON.parse(@utf_8_ascii_8bit)
+ assert_equal @parsed, JSON.parse(@utf_16be_ascii_8bit)
+ assert_equal @parsed, JSON.parse(@utf_16le_ascii_8bit)
+ assert_equal @parsed, JSON.parse(@utf_32be_ascii_8bit)
+ assert_equal @parsed, JSON.parse(@utf_32le_ascii_8bit)
+ end
+
+ def test_generate
+ assert_equal @generated, JSON.generate(@parsed, :ascii_only => true)
+ if defined?(::Encoding)
+ assert_equal @generated, JSON.generate(@utf_16_data, :ascii_only => true)
+ else
+ # XXX checking for correct utf8 data is not as strict (yet?) without :ascii_only
+ assert_raises(JSON::GeneratorError) { JSON.generate(@utf_16_data, :ascii_only => true) }
+ end
+ end
+end
diff --git a/test/json/test_json_fixtures.rb b/test/json/test_json_fixtures.rb
new file mode 100644
index 0000000000..584dffdfdb
--- /dev/null
+++ b/test/json/test_json_fixtures.rb
@@ -0,0 +1,35 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+
+require 'test/unit'
+require File.join(File.dirname(__FILE__), 'setup_variant')
+
+class TestJSONFixtures < Test::Unit::TestCase
+ def setup
+ fixtures = File.join(File.dirname(__FILE__), 'fixtures/*.json')
+ passed, failed = Dir[fixtures].partition { |f| f['pass'] }
+ @passed = passed.inject([]) { |a, f| a << [ f, File.read(f) ] }.sort
+ @failed = failed.inject([]) { |a, f| a << [ f, File.read(f) ] }.sort
+ end
+
+ def test_passing
+ for name, source in @passed
+ begin
+ assert JSON.parse(source),
+ "Did not pass for fixture '#{name}': #{source.inspect}"
+ rescue => e
+ warn "\nCaught #{e.class}(#{e}) for fixture '#{name}': #{source.inspect}\n#{e.backtrace * "\n"}"
+ raise e
+ end
+ end
+ end
+
+ def test_failing
+ for name, source in @failed
+ assert_raises(JSON::ParserError, JSON::NestingError,
+ "Did not fail for fixture '#{name}': #{source.inspect}") do
+ JSON.parse(source)
+ end
+ end
+ end
+end
diff --git a/test/json/test_json_generate.rb b/test/json/test_json_generate.rb
new file mode 100644
index 0000000000..1834290944
--- /dev/null
+++ b/test/json/test_json_generate.rb
@@ -0,0 +1,347 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+
+require 'test/unit'
+require File.join(File.dirname(__FILE__), 'setup_variant')
+
+class TestJSONGenerate < Test::Unit::TestCase
+ include JSON
+
+ def setup
+ @hash = {
+ 'a' => 2,
+ 'b' => 3.141,
+ 'c' => 'c',
+ 'd' => [ 1, "b", 3.14 ],
+ 'e' => { 'foo' => 'bar' },
+ 'g' => "\"\0\037",
+ 'h' => 1000.0,
+ 'i' => 0.001
+ }
+ @json2 = '{"a":2,"b":3.141,"c":"c","d":[1,"b",3.14],"e":{"foo":"bar"},' +
+ '"g":"\\"\\u0000\\u001f","h":1000.0,"i":0.001}'
+ @json3 = <<'EOT'.chomp
+{
+ "a": 2,
+ "b": 3.141,
+ "c": "c",
+ "d": [
+ 1,
+ "b",
+ 3.14
+ ],
+ "e": {
+ "foo": "bar"
+ },
+ "g": "\"\u0000\u001f",
+ "h": 1000.0,
+ "i": 0.001
+}
+EOT
+ end
+
+ def test_generate
+ json = generate(@hash)
+ assert_equal(JSON.parse(@json2), JSON.parse(json))
+ json = JSON[@hash]
+ assert_equal(JSON.parse(@json2), JSON.parse(json))
+ parsed_json = parse(json)
+ assert_equal(@hash, parsed_json)
+ json = generate({1=>2})
+ assert_equal('{"1":2}', json)
+ parsed_json = parse(json)
+ assert_equal({"1"=>2}, parsed_json)
+ assert_raise(GeneratorError) { generate(666) }
+ assert_equal '666', generate(666, :quirks_mode => true)
+ end
+
+ def test_generate_pretty
+ json = pretty_generate(@hash)
+ # hashes aren't (insertion) ordered on every ruby implementation, so compare parsed values instead of assert_equal(@json3, json)
+ assert_equal(JSON.parse(@json3), JSON.parse(json))
+ parsed_json = parse(json)
+ assert_equal(@hash, parsed_json)
+ json = pretty_generate({1=>2})
+ assert_equal(<<'EOT'.chomp, json)
+{
+ "1": 2
+}
+EOT
+ parsed_json = parse(json)
+ assert_equal({"1"=>2}, parsed_json)
+ assert_raise(GeneratorError) { pretty_generate(666) }
+ assert_equal '666', pretty_generate(666, :quirks_mode => true)
+ end
+
+ def test_generate_custom
+ state = State.new(:space_before => " ", :space => " ", :indent => "<i>", :object_nl => "\n", :array_nl => "<a_nl>")
+ json = generate({1=>{2=>3,4=>[5,6]}}, state)
+ assert_equal(<<'EOT'.chomp, json)
+{
+<i>"1" : {
+<i><i>"2" : 3,
+<i><i>"4" : [<a_nl><i><i><i>5,<a_nl><i><i><i>6<a_nl><i><i>]
+<i>}
+}
+EOT
+ end
+
+ def test_fast_generate
+ json = fast_generate(@hash)
+ assert_equal(JSON.parse(@json2), JSON.parse(json))
+ parsed_json = parse(json)
+ assert_equal(@hash, parsed_json)
+ json = fast_generate({1=>2})
+ assert_equal('{"1":2}', json)
+ parsed_json = parse(json)
+ assert_equal({"1"=>2}, parsed_json)
+ assert_raise(GeneratorError) { fast_generate(666) }
+ assert_equal '666', fast_generate(666, :quirks_mode => true)
+ end
+
+ def test_own_state
+ state = State.new
+ json = generate(@hash, state)
+ assert_equal(JSON.parse(@json2), JSON.parse(json))
+ parsed_json = parse(json)
+ assert_equal(@hash, parsed_json)
+ json = generate({1=>2}, state)
+ assert_equal('{"1":2}', json)
+ parsed_json = parse(json)
+ assert_equal({"1"=>2}, parsed_json)
+ assert_raise(GeneratorError) { generate(666, state) }
+ state.quirks_mode = true
+ assert state.quirks_mode?
+ assert_equal '666', generate(666, state)
+ end
+
+ def test_states
+ json = generate({1=>2}, nil)
+ assert_equal('{"1":2}', json)
+ s = JSON.state.new
+ assert s.check_circular?
+ assert s[:check_circular?]
+ h = { 1=>2 }
+ h[3] = h
+ assert_raises(JSON::NestingError) { generate(h) }
+ assert_raises(JSON::NestingError) { generate(h, s) }
+ s = JSON.state.new
+ a = [ 1, 2 ]
+ a << a
+ assert_raises(JSON::NestingError) { generate(a, s) }
+ assert s.check_circular?
+ assert s[:check_circular?]
+ end
+
+ def test_pretty_state
+ state = PRETTY_STATE_PROTOTYPE.dup
+ assert_equal({
+ :allow_nan => false,
+ :array_nl => "\n",
+ :ascii_only => false,
+ :buffer_initial_length => 1024,
+ :quirks_mode => false,
+ :depth => 0,
+ :indent => " ",
+ :max_nesting => 100,
+ :object_nl => "\n",
+ :space => " ",
+ :space_before => "",
+ }.sort_by { |n,| n.to_s }, state.to_h.sort_by { |n,| n.to_s })
+ end
+
+ def test_safe_state
+ state = SAFE_STATE_PROTOTYPE.dup
+ assert_equal({
+ :allow_nan => false,
+ :array_nl => "",
+ :ascii_only => false,
+ :buffer_initial_length => 1024,
+ :quirks_mode => false,
+ :depth => 0,
+ :indent => "",
+ :max_nesting => 100,
+ :object_nl => "",
+ :space => "",
+ :space_before => "",
+ }.sort_by { |n,| n.to_s }, state.to_h.sort_by { |n,| n.to_s })
+ end
+
+ def test_fast_state
+ state = FAST_STATE_PROTOTYPE.dup
+ assert_equal({
+ :allow_nan => false,
+ :array_nl => "",
+ :ascii_only => false,
+ :buffer_initial_length => 1024,
+ :quirks_mode => false,
+ :depth => 0,
+ :indent => "",
+ :max_nesting => 0,
+ :object_nl => "",
+ :space => "",
+ :space_before => "",
+ }.sort_by { |n,| n.to_s }, state.to_h.sort_by { |n,| n.to_s })
+ end
+
+ def test_allow_nan
+ assert_raises(GeneratorError) { generate([JSON::NaN]) }
+ assert_equal '[NaN]', generate([JSON::NaN], :allow_nan => true)
+ assert_raises(GeneratorError) { fast_generate([JSON::NaN]) }
+ assert_raises(GeneratorError) { pretty_generate([JSON::NaN]) }
+ assert_equal "[\n NaN\n]", pretty_generate([JSON::NaN], :allow_nan => true)
+ assert_raises(GeneratorError) { generate([JSON::Infinity]) }
+ assert_equal '[Infinity]', generate([JSON::Infinity], :allow_nan => true)
+ assert_raises(GeneratorError) { fast_generate([JSON::Infinity]) }
+ assert_raises(GeneratorError) { pretty_generate([JSON::Infinity]) }
+ assert_equal "[\n Infinity\n]", pretty_generate([JSON::Infinity], :allow_nan => true)
+ assert_raises(GeneratorError) { generate([JSON::MinusInfinity]) }
+ assert_equal '[-Infinity]', generate([JSON::MinusInfinity], :allow_nan => true)
+ assert_raises(GeneratorError) { fast_generate([JSON::MinusInfinity]) }
+ assert_raises(GeneratorError) { pretty_generate([JSON::MinusInfinity]) }
+ assert_equal "[\n -Infinity\n]", pretty_generate([JSON::MinusInfinity], :allow_nan => true)
+ end
+
+ def test_depth
+ ary = []; ary << ary
+ assert_equal 0, JSON::SAFE_STATE_PROTOTYPE.depth
+ assert_raises(JSON::NestingError) { JSON.generate(ary) }
+ assert_equal 0, JSON::SAFE_STATE_PROTOTYPE.depth
+ assert_equal 0, JSON::PRETTY_STATE_PROTOTYPE.depth
+ assert_raises(JSON::NestingError) { JSON.pretty_generate(ary) }
+ assert_equal 0, JSON::PRETTY_STATE_PROTOTYPE.depth
+ s = JSON.state.new
+ assert_equal 0, s.depth
+ assert_raises(JSON::NestingError) { ary.to_json(s) }
+ assert_equal 100, s.depth
+ end
+
+ def test_buffer_initial_length
+ s = JSON.state.new
+ assert_equal 1024, s.buffer_initial_length
+ s.buffer_initial_length = 0
+ assert_equal 1024, s.buffer_initial_length
+ s.buffer_initial_length = -1
+ assert_equal 1024, s.buffer_initial_length
+ s.buffer_initial_length = 128
+ assert_equal 128, s.buffer_initial_length
+ end
+
+ def test_gc
+ if respond_to?(:assert_in_out_err)
+ assert_in_out_err(%w[-rjson --disable-gems], <<-EOS, [], [])
+ bignum_too_long_to_embed_as_string = 1234567890123456789012345
+ expect = bignum_too_long_to_embed_as_string.to_s
+ GC.stress = true
+
+ 10.times do |i|
+ tmp = bignum_too_long_to_embed_as_string.to_json
+ raise "'\#{expect}' is expected, but '\#{tmp}'" unless tmp == expect
+ end
+ EOS
+ end
+ end if GC.respond_to?(:stress=)
+
+ def test_configure_using_configure_and_merge
+ numbered_state = {
+ :indent => "1",
+ :space => '2',
+ :space_before => '3',
+ :object_nl => '4',
+ :array_nl => '5'
+ }
+ state1 = JSON.state.new
+ state1.merge(numbered_state)
+ assert_equal '1', state1.indent
+ assert_equal '2', state1.space
+ assert_equal '3', state1.space_before
+ assert_equal '4', state1.object_nl
+ assert_equal '5', state1.array_nl
+ state2 = JSON.state.new
+ state2.configure(numbered_state)
+ assert_equal '1', state2.indent
+ assert_equal '2', state2.space
+ assert_equal '3', state2.space_before
+ assert_equal '4', state2.object_nl
+ assert_equal '5', state2.array_nl
+ end
+
+ def test_configure_hash_conversion
+ state = JSON.state.new
+ state.configure(:indent => '1')
+ assert_equal '1', state.indent
+ state = JSON.state.new
+ foo = 'foo'
+ assert_raise(TypeError) do
+ state.configure(foo)
+ end
+ def foo.to_h
+ { :indent => '2' }
+ end
+ state.configure(foo)
+ assert_equal '2', state.indent
+ end
+
+ if defined?(JSON::Ext::Generator)
+ def test_broken_bignum # [ruby-core:38867]
+ pid = fork do
+ Bignum.class_eval do
+ def to_s
+ end
+ end
+ begin
+ JSON::Ext::Generator::State.new.generate(1<<64)
+ exit 1
+ rescue TypeError
+ exit 0
+ end
+ end
+ _, status = Process.waitpid2(pid)
+ assert status.success?
+ rescue NotImplementedError
+ # forking to avoid modifying a core class of the parent process and
+ # introducing race conditions if tests are run in parallel
+ end
+ end
+
+ def test_hash_likeness_set_symbol
+ state = JSON.state.new
+ assert_equal nil, state[:foo]
+ assert_equal nil.class, state[:foo].class
+ assert_equal nil, state['foo']
+ state[:foo] = :bar
+ assert_equal :bar, state[:foo]
+ assert_equal :bar, state['foo']
+ state_hash = state.to_hash
+ assert_kind_of Hash, state_hash
+ assert_equal :bar, state_hash[:foo]
+ end
+
+ def test_hash_likeness_set_string
+ state = JSON.state.new
+ assert_equal nil, state[:foo]
+ assert_equal nil, state['foo']
+ state['foo'] = :bar
+ assert_equal :bar, state[:foo]
+ assert_equal :bar, state['foo']
+ state_hash = state.to_hash
+ assert_kind_of Hash, state_hash
+ assert_equal :bar, state_hash[:foo]
+ end
+
+ def test_json_generate
+ assert_raise JSON::GeneratorError do
+ assert_equal true, JSON.generate(["\xea"])
+ end
+ end
+
+ def test_string_subclass
+ s = Class.new(String) do
+ def to_s; self; end
+ undef to_json
+ end
+ assert_nothing_raised(SystemStackError) do
+ assert_equal '[""]', JSON.generate([s.new])
+ end
+ end
+end
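
The State tests above configure the generator through an explicit state object rather than an options hash; the same knobs can be set directly, for example:

    require 'json'

    state = JSON.state.new(:indent => '  ', :space => ' ',
                           :object_nl => "\n", :array_nl => "\n")
    puts JSON.generate({ 'a' => [1, 2] }, state)
    # {
    #   "a": [
    #     1,
    #     2
    #   ]
    # }
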
diff --git a/test/json/test_json_generic_object.rb b/test/json/test_json_generic_object.rb
new file mode 100644
index 0000000000..c43c7762be
--- /dev/null
+++ b/test/json/test_json_generic_object.rb
@@ -0,0 +1,75 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+
+require 'test/unit'
+require File.join(File.dirname(__FILE__), 'setup_variant')
+class TestJSONGenericObject < Test::Unit::TestCase
+ include JSON
+
+ def setup
+ @go = GenericObject[ :a => 1, :b => 2 ]
+ end
+
+ def test_attributes
+ assert_equal 1, @go.a
+ assert_equal 1, @go[:a]
+ assert_equal 2, @go.b
+ assert_equal 2, @go[:b]
+ assert_nil @go.c
+ assert_nil @go[:c]
+ end
+
+ def test_generate_json
+ switch_json_creatable do
+ assert_equal @go, JSON(JSON(@go), :create_additions => true)
+ end
+ end
+
+ def test_parse_json
+ assert_kind_of Hash, JSON('{ "json_class": "JSON::GenericObject", "a": 1, "b": 2 }', :create_additions => true)
+ switch_json_creatable do
+ assert_equal @go, l = JSON('{ "json_class": "JSON::GenericObject", "a": 1, "b": 2 }', :create_additions => true)
+ assert_equal 1, l.a
+ assert_equal @go, l = JSON('{ "a": 1, "b": 2 }', :object_class => GenericObject)
+ assert_equal 1, l.a
+ assert_equal GenericObject[:a => GenericObject[:b => 2]],
+ l = JSON('{ "a": { "b": 2 } }', :object_class => GenericObject)
+ assert_equal 2, l.a.b
+ end
+ end
+
+ def test_from_hash
+ result = GenericObject.from_hash(
+ :foo => { :bar => { :baz => true }, :quux => [ { :foobar => true } ] })
+ assert_kind_of GenericObject, result.foo
+ assert_kind_of GenericObject, result.foo.bar
+ assert_equal true, result.foo.bar.baz
+ assert_kind_of GenericObject, result.foo.quux.first
+ assert_equal true, result.foo.quux.first.foobar
+ assert_equal true, GenericObject.from_hash(true)
+ end
+
+ def test_json_generic_object_load
+ empty = JSON::GenericObject.load(nil)
+ assert_kind_of JSON::GenericObject, empty
+ simple_json = '{"json_class":"JSON::GenericObject","hello":"world"}'
+ simple = JSON::GenericObject.load(simple_json)
+ assert_kind_of JSON::GenericObject, simple
+ assert_equal "world", simple.hello
+ converting = JSON::GenericObject.load('{ "hello": "world" }')
+ assert_kind_of JSON::GenericObject, converting
+ assert_equal "world", converting.hello
+
+ json = JSON::GenericObject.dump(JSON::GenericObject[:hello => 'world'])
+ assert_equal JSON(json), JSON('{"json_class":"JSON::GenericObject","hello":"world"}')
+ end
+
+ private
+
+ def switch_json_creatable
+ JSON::GenericObject.json_creatable = true
+ yield
+ ensure
+ JSON::GenericObject.json_creatable = false
+ end
+end
diff --git a/test/json/test_json_string_matching.rb b/test/json/test_json_string_matching.rb
new file mode 100644
index 0000000000..c233df8c2c
--- /dev/null
+++ b/test/json/test_json_string_matching.rb
@@ -0,0 +1,39 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+
+require 'test/unit'
+require File.join(File.dirname(__FILE__), 'setup_variant')
+require 'stringio'
+require 'time'
+
+class TestJSONStringMatching < Test::Unit::TestCase
+ include JSON
+
+ class TestTime < ::Time
+ def self.json_create(string)
+ Time.parse(string)
+ end
+
+ def to_json(*)
+ %{"#{strftime('%FT%T%z')}"}
+ end
+
+ def ==(other)
+ to_i == other.to_i
+ end
+ end
+
+ def test_match_date
+ t = TestTime.new
+ t_json = [ t ].to_json
+ assert_equal [ t ],
+ JSON.parse(t_json, :create_additions => true,
+ :match_string => { /\A\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[+-]\d{4}\z/ => TestTime })
+ assert_equal [ t.strftime('%FT%T%z') ],
+ JSON.parse(t_json, :create_additions => true,
+ :match_string => { /\A\d{3}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[+-]\d{4}\z/ => TestTime })
+ assert_equal [ t.strftime('%FT%T%z') ],
+ JSON.parse(t_json,
+ :match_string => { /\A\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[+-]\d{4}\z/ => TestTime })
+ end
+end
diff --git a/test/json/test_json_unicode.rb b/test/json/test_json_unicode.rb
new file mode 100644
index 0000000000..8352d5c6c6
--- /dev/null
+++ b/test/json/test_json_unicode.rb
@@ -0,0 +1,72 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+
+require 'test/unit'
+require File.join(File.dirname(__FILE__), 'setup_variant')
+
+class TestJSONUnicode < Test::Unit::TestCase
+ include JSON
+
+ def test_unicode
+ assert_equal '""', ''.to_json
+ assert_equal '"\\b"', "\b".to_json
+ assert_equal '"\u0001"', 0x1.chr.to_json
+ assert_equal '"\u001f"', 0x1f.chr.to_json
+ assert_equal '" "', ' '.to_json
+ assert_equal "\"#{0x7f.chr}\"", 0x7f.chr.to_json
+ utf8 = [ "© ≠ €! \01" ]
+ json = '["© ≠ €! \u0001"]'
+ assert_equal json, utf8.to_json(:ascii_only => false)
+ assert_equal utf8, parse(json)
+ json = '["\u00a9 \u2260 \u20ac! \u0001"]'
+ assert_equal json, utf8.to_json(:ascii_only => true)
+ assert_equal utf8, parse(json)
+ utf8 = ["\343\201\202\343\201\204\343\201\206\343\201\210\343\201\212"]
+ json = "[\"\343\201\202\343\201\204\343\201\206\343\201\210\343\201\212\"]"
+ assert_equal utf8, parse(json)
+ assert_equal json, utf8.to_json(:ascii_only => false)
+ utf8 = ["\343\201\202\343\201\204\343\201\206\343\201\210\343\201\212"]
+ assert_equal utf8, parse(json)
+ json = "[\"\\u3042\\u3044\\u3046\\u3048\\u304a\"]"
+ assert_equal json, utf8.to_json(:ascii_only => true)
+ assert_equal utf8, parse(json)
+ utf8 = ['საქართველო']
+ json = '["საქართველო"]'
+ assert_equal json, utf8.to_json(:ascii_only => false)
+ json = "[\"\\u10e1\\u10d0\\u10e5\\u10d0\\u10e0\\u10d7\\u10d5\\u10d4\\u10da\\u10dd\"]"
+ assert_equal json, utf8.to_json(:ascii_only => true)
+ assert_equal utf8, parse(json)
+ assert_equal '["Ã"]', JSON.generate(["Ã"], :ascii_only => false)
+ assert_equal '["\\u00c3"]', JSON.generate(["Ã"], :ascii_only => true)
+ assert_equal ["€"], JSON.parse('["\u20ac"]')
+ utf8 = ["\xf0\xa0\x80\x81"]
+ json = "[\"\xf0\xa0\x80\x81\"]"
+ assert_equal json, JSON.generate(utf8, :ascii_only => false)
+ assert_equal utf8, JSON.parse(json)
+ json = '["\ud840\udc01"]'
+ assert_equal json, JSON.generate(utf8, :ascii_only => true)
+ assert_equal utf8, JSON.parse(json)
+ end
+
+ def test_chars
+ (0..0x7f).each do |i|
+ json = '["\u%04x"]' % i
+ if RUBY_VERSION >= "1.9."
+ i = i.chr
+ end
+ assert_equal i, JSON.parse(json).first[0]
+ if i == ?\b
+ generated = JSON.generate(["" << i])
+ assert '["\b"]' == generated || '["\10"]' == generated
+ elsif [?\n, ?\r, ?\t, ?\f].include?(i)
+ assert_equal '[' << ('' << i).dump << ']', JSON.generate(["" << i])
+ elsif i.chr < 0x20.chr
+ assert_equal json, JSON.generate(["" << i])
+ end
+ end
+ assert_raise(JSON::GeneratorError) do
+ JSON.generate(["\x80"], :ascii_only => true)
+ end
+ assert_equal "\302\200", JSON.parse('["\u0080"]').first
+ end
+end
diff --git a/test/rdoc/test_rdoc_generator_darkfish.rb b/test/rdoc/test_rdoc_generator_darkfish.rb
index 204983d97b..fc77e4b105 100644
--- a/test/rdoc/test_rdoc_generator_darkfish.rb
+++ b/test/rdoc/test_rdoc_generator_darkfish.rb
@@ -225,4 +225,5 @@ class TestRDocGeneratorDarkfish < RDoc::TestCase
"#{filename} is not hard-linked"
end
-end if defined?(JSON)
+end
+
diff --git a/test/rdoc/test_rdoc_generator_json_index.rb b/test/rdoc/test_rdoc_generator_json_index.rb
index c95820ca1e..9283fd9b76 100644
--- a/test/rdoc/test_rdoc_generator_json_index.rb
+++ b/test/rdoc/test_rdoc_generator_json_index.rb
@@ -320,4 +320,5 @@ class TestRDocGeneratorJsonIndex < RDoc::TestCase
assert_equal 'cd', @g.search_string('C d')
end
-end if defined?(JSON)
+end
+
diff --git a/test/rdoc/test_rdoc_rubygems_hook.rb b/test/rdoc/test_rdoc_rubygems_hook.rb
index 0d5e11ab89..960afc9ed4 100644
--- a/test/rdoc/test_rdoc_rubygems_hook.rb
+++ b/test/rdoc/test_rdoc_rubygems_hook.rb
@@ -66,7 +66,7 @@ class TestRDocRubygemsHook < Gem::TestCase
@hook.document 'darkfish', options, @a.doc_dir('rdoc')
assert @hook.rdoc_installed?
- end if defined?(JSON)
+ end
def test_generate
FileUtils.mkdir_p @a.doc_dir
@@ -105,7 +105,7 @@ class TestRDocRubygemsHook < Gem::TestCase
assert_equal %w[README lib], rdoc.options.files.sort
assert_equal 'MyTitle', rdoc.store.main
- end if defined?(JSON)
+ end
def test_generate_configuration_rdoc_array
Gem.configuration[:rdoc] = %w[-A]
@@ -248,3 +248,4 @@ class TestRDocRubygemsHook < Gem::TestCase
end
end
+
diff --git a/test/rdoc/test_rdoc_servlet.rb b/test/rdoc/test_rdoc_servlet.rb
index c9449f3631..143e2f225b 100644
--- a/test/rdoc/test_rdoc_servlet.rb
+++ b/test/rdoc/test_rdoc_servlet.rb
@@ -531,4 +531,5 @@ class TestRDocServlet < RDoc::TestCase
store.save
end
-end if defined?(JSON)
+end
+
diff --git a/test/ruby/test_extlibs.rb b/test/ruby/test_extlibs.rb
index 275b37b567..71dcf2b133 100644
--- a/test/ruby/test_extlibs.rb
+++ b/test/ruby/test_extlibs.rb
@@ -59,6 +59,7 @@ class TestExtLibs < Test::Unit::TestCase
check_existence "io/console"
check_existence "io/nonblock"
check_existence "io/wait"
+ check_existence "json"
check_existence "mathn/complex"
check_existence "mathn/rational"
check_existence "nkf"
diff --git a/test/rubygems/test_gem_commands_install_command.rb b/test/rubygems/test_gem_commands_install_command.rb
index c165479e76..f03285ae85 100644
--- a/test/rubygems/test_gem_commands_install_command.rb
+++ b/test/rubygems/test_gem_commands_install_command.rb
@@ -379,7 +379,7 @@ ERROR: Possible alternatives: non_existent_with_hint
wait_for_child_process_to_exit
assert_path_exists File.join(a2.doc_dir, 'ri')
- assert_path_exists File.join(a2.doc_dir, 'rdoc') if defined?(JSON)
+ assert_path_exists File.join(a2.doc_dir, 'rdoc')
end
def test_execute_saves_build_args
diff --git a/test/rubygems/test_gem_commands_update_command.rb b/test/rubygems/test_gem_commands_update_command.rb
index 4387eea20b..6a327068e2 100644
--- a/test/rubygems/test_gem_commands_update_command.rb
+++ b/test/rubygems/test_gem_commands_update_command.rb
@@ -265,7 +265,7 @@ class TestGemCommandsUpdateCommand < Gem::TestCase
a2 = @specs['a-2']
assert_path_exists File.join(a2.doc_dir, 'rdoc')
- end if defined?(JSON)
+ end
def test_execute_named
spec_fetcher do |fetcher|
@@ -540,3 +540,4 @@ class TestGemCommandsUpdateCommand < Gem::TestCase
end
end
+