C语言 添加到前缀树时的分段错误



>我正在尝试编写一个函数将文本文件中的单词输入到前缀树中,但它一直给我分段错误

int wordCount = 0;
typedef struct node
{
    bool isWord;
    struct node* children[26];
}node;
struct node* newNode(struct node* n)
{
    n = (struct node*)malloc(sizeof(struct node));
    n->isWord = false;
    for (int i = 0; i < 26; i++)
        n->children[i] = 0;    
    return n;
}
struct node* n = NULL;
void append(char* s)
{
    struct node* m = NULL;
    m = n;
    for (int i = 0, p = strlen(s); i < p; i++)
    {
        if (m->children[s[i] - 'a'] == NULL)
            m->children[s[i] - 'a'] = newNode(m->children[s[i] - 'a']);
        m = m->children[s[i] - 'a'];
        if (i == p - 1)
        {
            m->isWord = true;
            wordCount++;
        }            
    }              
}
bool load(const char* dictionary)
{
    FILE* f = fopen(dictionary, "r");
    n = newNode(n);
    if (f == NULL)
        return false;
    char s[100];
    while (f != NULL && !feof(f))
    {
        fgets(s, sizeof(s), f);       
        for (int i = 0, j = strlen(s); i < j; i++)
            s[i] = tolower(s[i]);  
        append(s);
    }
    return true;
}

经过一些测试,我很确定问题出在追加功能上。

问题中的代码(截至 2014-02-25 20:55 -08:00)改编到程序中(添加标头 <assert.h><ctype.h><stdbool.h><stdio.h><stdlib.h><string.h>;在内存分配后添加一个断言,修复 fgets() 的使用),并从数据中删除换行符,并在读取时打印行(单词),valgrind使其无崩溃(但非常泄漏)运行。

法典:

#include <assert.h>
#include <ctype.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
static int wordCount = 0;
typedef struct node
{
    bool isWord;
    struct node *children[26];
} node;
static struct node *newNode(struct node *n)
{
    n = (struct node *)malloc(sizeof(struct node));
    assert(n != 0);
    n->isWord = false;
    for (int i = 0; i < 26; i++)
        n->children[i] = 0;
    return n;
}
static struct node *n = NULL;
static
void append(char *s)
{
    struct node *m = NULL;
    m = n;
    for (int i = 0, p = strlen(s); i < p; i++)
    {
        if (m->children[s[i] - 'a'] == NULL)
            m->children[s[i] - 'a'] = newNode(m->children[s[i] - 'a']);
        m = m->children[s[i] - 'a'];
        if (i == p - 1)
        {
            m->isWord = true;
            wordCount++;
        }
    }
}
static
bool load(const char *dictionary)
{
    FILE *f = fopen(dictionary, "r");
    n = newNode(n);
    if (f == NULL)
        return false;
    char s[100];
    while (fgets(s, sizeof(s), f) != 0)
    {
        for (int i = 0, j = strlen(s); i < j; i++)
            s[i] = tolower(s[i]);
    s[strlen(s)-1] = '';
    printf("[%s]n", s);
        append(s);
    }
    return true;
}
int main(void)
{
    if (load("file"))
    {
        printf("File loaded OKn");
        return 0;
    }
    else
    {
        printf("Failed to load filen");
        return 1;
    }
}

数据:

an
anne
apple
aardvark
appalachian
antelope
antediluvian
alabama
antidisestablishmentarianism

valgrind输出:

$ valgrind ./prefix
==29970== Memcheck, a memory error detector
==29970== Copyright (C) 2002-2012, and GNU GPL'd, by Julian Seward et al.
==29970== Using Valgrind-3.8.1 and LibVEX; rerun with -h for copyright info
==29970== Command: ./prefix
==29970== 
[an]
[anne]
[apple]
[aardvark]
[appalachian]
[antelope]
[antediluvian]
[alabama]
[antidisestablishmentarianism]
File loaded OK
==29970== 
==29970== HEAP SUMMARY:
==29970==     in use at exit: 15,472 bytes in 70 blocks
==29970==   total heap usage: 70 allocs, 0 frees, 15,472 bytes allocated
==29970== 
==29970== LEAK SUMMARY:
==29970==    definitely lost: 0 bytes in 0 blocks
==29970==    indirectly lost: 0 bytes in 0 blocks
==29970==      possibly lost: 0 bytes in 0 blocks
==29970==    still reachable: 15,472 bytes in 70 blocks
==29970==         suppressed: 0 bytes in 0 blocks
==29970== Rerun with --leak-check=full to see details of leaked memory
==29970== 
==29970== For counts of detected and suppressed errors, rerun with: -v
==29970== ERROR SUMMARY: 0 errors from 0 contexts (suppressed: 2 from 2)
$

如果你不修剪换行符,你将越界访问内存;这将是我对哪里出了问题的最佳猜测。 问题所在并不明确。 当我注释掉换行符的行时,我收到了关于在该行中使用未初始化数据的投诉:

        if (m->children[s[i] - 'a'] == NULL)

(注意:assert是检测内存分配问题的一种非常粗略的方法;有效但非常粗糙。

相关内容

  • 没有找到相关文章

最新更新