granicus.if.org Git - multimarkdown/commitdiff
CHANGED: Restructure strong/emph to a nested structure
author     Fletcher T. Penney <fletcher@fletcherpenney.net>
           Thu, 4 May 2017 18:24:23 +0000 (14:24 -0400)
committer  Fletcher T. Penney <fletcher@fletcherpenney.net>
           Thu, 4 May 2017 18:24:23 +0000 (14:24 -0400)
Sources/libMultiMarkdown/html.c
Sources/libMultiMarkdown/include/libMultiMarkdown.h
Sources/libMultiMarkdown/latex.c
Sources/libMultiMarkdown/mmd.c
Sources/libMultiMarkdown/odf.c
Sources/libMultiMarkdown/token.c
Sources/libMultiMarkdown/writer.c
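In short, matched emphasis/strong delimiter runs are now grafted into container tokens (the new PAIR_EMPH and PAIR_STRONG types) whose children carry the original markers and the inner content, instead of being left as a flat token chain. A minimal sketch of the expected shape, assuming the token names visible in the hunks below (arrows denote the ->next sibling chain; the strong-side marker types are not shown in this diff and are only presumed):

    /*
     * Sketch only, not part of the commit.  For source like "*foo*" the
     * parser now produces a nested container:
     *
     *   PAIR_EMPH                      (spans the whole "*foo*" run)
     *     child: EMPH_START -> <text tokens for "foo"> -> EMPH_STOP
     *
     * and analogously PAIR_STRONG for "**foo**".  The exporters in html.c,
     * latex.c, and odf.c below therefore only need to recurse into
     * t->child, exactly like the existing PAIR_QUOTE_* and PAIR_STAR cases.
     */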

Sources/libMultiMarkdown/html.c
index f252d37ac8aca11e0f03dbd15f67ea001b0264a4..1a5b21b76882abb0f77accb35c37d384eac80a82 100644
@@ -1508,11 +1508,13 @@ void mmd_export_token_html(DString * out, const char * source, token * t, scratc
                case PAIR_HTML_COMMENT:
                        print_token(t);
                        break;
+               case PAIR_EMPH:
                case PAIR_MATH:
                case PAIR_PAREN:
                case PAIR_QUOTE_DOUBLE:
                case PAIR_QUOTE_SINGLE:
                case PAIR_STAR:
+               case PAIR_STRONG:
                case PAIR_UL:
                        mmd_export_token_tree_html(out, source, t->child, scratch);
                        break;
Sources/libMultiMarkdown/include/libMultiMarkdown.h
index 2950a1aaab96ea97cde2631528443648c5d834e5..649706fac0c39899f076516f091ecf95153b319b 100644
@@ -202,6 +202,7 @@ enum token_types {
        PAIR_BRACKET_CITATION,
        PAIR_BRACKET_IMAGE,
        PAIR_BRACKET_VARIABLE,
+       PAIR_EMPH,
        PAIR_MATH,
        PAIR_PAREN,
        PAIR_QUOTE_SINGLE,
@@ -209,6 +210,7 @@ enum token_types {
        PAIR_QUOTE_ALT,
        PAIR_SUPERSCRIPT,
        PAIR_STAR,
+       PAIR_STRONG,
        PAIR_UL,
        PAIR_BRACES,
 
Sources/libMultiMarkdown/latex.c
index 309f92fea17198c6d865bb96e75f530257b90c76..836bdf4023e22baaa694aefd9b71bccc597464a6 100644
@@ -1460,11 +1460,13 @@ void mmd_export_token_latex(DString * out, const char * source, token * t, scrat
 
                        if (strncmp(&source[t->child->start + t->child->len], "\\begin", 6) != 0)
                                mmd_export_token_latex(out, source, t->child->mate, scratch);
-                       break;                  
+                       break;
+               case PAIR_EMPH:
                case PAIR_PAREN:
                case PAIR_QUOTE_DOUBLE:
                case PAIR_QUOTE_SINGLE:
                case PAIR_STAR:
+               case PAIR_STRONG:
                case PAIR_UL:
                        mmd_export_token_tree_latex(out, source, t->child, scratch);
                        break;
Sources/libMultiMarkdown/mmd.c
index 399d0c209ccebb6d35cb407576328234cb15628e..e6f370497def4c6faf91273951463e997b56d895 100644
@@ -1432,9 +1432,12 @@ void pair_emphasis_tokens(token * t) {
                                                
                                                tokens_prune(t->next, t->next);
                                                tokens_prune(closer->prev, closer->prev);
+
+                                               token_prune_graft(t, closer, PAIR_STRONG);
                                        } else {
                                                t->type = EMPH_START;
                                                closer->type = EMPH_STOP;
+                                               token_prune_graft(t, closer, PAIR_EMPH);
                                        }
                                        break;
                                        
@@ -1668,17 +1671,12 @@ void parse_table_row_into_cells(token * row) {
                last = first;
        }
 
-
        walker = walker->next;
 
        while (walker) {
                switch (walker->type) {
                        case PIPE:
-                               if (row->child == first) {
-                                       row->child = token_prune_graft(first, last, TABLE_CELL);
-                               } else {
-                                       token_prune_graft(first, last, TABLE_CELL);
-                               }
+                               token_prune_graft(first, last, TABLE_CELL);
                                first = NULL;
                                last = NULL;
                                walker->type = TABLE_DIVIDER;
@@ -1696,11 +1694,7 @@ void parse_table_row_into_cells(token * row) {
        }
 
        if (first) {
-               if (row->child == first) {
-                       row->child = token_prune_graft(first, last, TABLE_CELL);
-               } else {
-                       token_prune_graft(first, last, TABLE_CELL);
-               }
+               token_prune_graft(first, last, TABLE_CELL);
        }
 }
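The simplification above in parse_table_row_into_cells leans on the rewritten token_prune_graft further down: the container is now built by mutating first in place, so a parent that already pointed at first (here row->child) keeps pointing at the right node. A rough sketch of the invariant the caller now relies on:

    /*
     * Sketch of the caller's view under the new token_prune_graft():
     *
     *   row->child == first;                        // parent references the head token
     *   token_prune_graft(first, last, TABLE_CELL); // 'first' becomes the TABLE_CELL
     *                                               // container in place
     *   // row->child still points at the container, so the old
     *   // "if (row->child == first)" bookkeeping is unnecessary.
     */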
 
Sources/libMultiMarkdown/odf.c
index 7eaf5e8b7cbfc1370cf37ef29e963d9b350941b0..dccb0b3f550ec8481b1144c2f96204d7b9e236e3 100644
@@ -1384,11 +1384,13 @@ void mmd_export_token_odf(DString * out, const char * source, token * t, scratch
                        break;
                case PAIR_HTML_COMMENT:
                        break;
+               case PAIR_EMPH:
                case PAIR_MATH:
                case PAIR_PAREN:
                case PAIR_QUOTE_DOUBLE:
                case PAIR_QUOTE_SINGLE:
                case PAIR_STAR:
+               case PAIR_STRONG:
                case PAIR_UL:
                        mmd_export_token_tree_odf(out, source, t->child, scratch);
                        break;
Sources/libMultiMarkdown/token.c
index 32a719f3a4b8c8ef19482fdde9894dd767bfd2d2..a1a1b50860e71a6246c3b39ff7656e27b3ff2d19 100644
@@ -142,6 +142,22 @@ token * token_new(unsigned short type, size_t start, size_t len) {
 }
 
 
+/// Duplicate an existing token
+token * token_copy(token * original) {
+#ifdef kUseObjectPool
+       token * t = pool_allocate_object(token_pool);
+#else
+       token * t = malloc(sizeof(token));
+#endif
+
+       if (t) {
+               * t = * original;
+       }
+
+       return t;
+}
+
+
 /// Create a parent for a chain of tokens
 token * token_new_parent(token * child, unsigned short type) {
        if (child == NULL) {
@@ -307,34 +323,39 @@ token * token_prune_graft(token * first, token * last, unsigned short container_
        token * prev = first->prev;
        token * next = last->next;
        
-       // If we are head of chain, remember tail
-       token * tail = NULL;
-       if (prev == NULL)
-               tail = first->tail;
-
-
-       token * container = token_new(container_type, first->start, last->start + last->len - first->start);
-       
-       container->child = first;
-       container->next = next;
-       container->prev = prev;
-       container->can_close = 0;
-       container->can_open = 0;
-
-       if (tail)
-               container->tail = tail;
-
-       if (prev)
-               prev->next = container;
+       // Duplicate first token -- this will be child of new container
+       token * new_child = token_copy(first);
+       new_child->prev = NULL;
+       new_child->tail = last;
+       if (new_child->next) {
+               new_child->next->prev = new_child;
+       }
 
-       first->prev = NULL;
+       // Swap last (if necessary)
+       if (first == last)
+               last = new_child;
+
+       // Existing first token will be new container
+       first->child = new_child;
+       first->type = container_type;
+       first->len = last->start + last->len - first->start;
+       first->next = next;
+       first->can_close = 0;
+       first->can_open = 0;
+
+       // Fix mating
+       if (first->mate) {
+               first->mate = NULL;
+               new_child->mate->mate = new_child;
+       }
 
+       // Disconnect last token
        last->next = NULL;
 
        if (next)
-               next->prev = container;
+               next->prev = first;
 
-       return container;
+       return first;
 }
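Taken together, token_copy and the rewritten token_prune_graft replace "allocate a fresh container node" with "reuse first as the container and demote a shallow copy of it to be the first child". A condensed sketch of the pointer surgery, using only the fields touched above:

    /*
     * before:  prev -> first -> ... -> last -> next
     * after:   prev -> first (type = container_type) -> next
     *                    child: copy(first) -> ... -> last
     *
     * token_copy() is a shallow struct copy, so every pointer field of the
     * copy initially aliases the original; the graft immediately resets
     * prev/tail on the copy and redirects the mate's back-pointer, and
     * because 'first' keeps its identity, prev->next needs no update.
     */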
 
 
Sources/libMultiMarkdown/writer.c
index 89475174b069cb01e2012fa712b67ade28ceaf1d..7eac17b2508e90a31103072822127585da3a5ddb 100644
@@ -336,6 +336,10 @@ void print_token_raw(DString * out, const char * source, token * t) {
                        case STRONG_STOP:
                        case TEXT_EMPTY:
                                break;
+                       case PAIR_EMPH:
+                       case PAIR_STRONG:
+                               print_token_tree_raw(out, source, t->child);
+                               break;
                        default:
                                d_string_append_c_array(out, &source[t->start], t->len);
                                break;