Open hashing (separate-chaining hash table)

Common.h
#pragma once
// Align the hash table capacity to a prime-number table to reduce hash collisions.

// Returns the smallest prime from the internal table that is >= num
// (or the largest table prime if num exceeds all entries).
unsigned long GetNextPrime(unsigned long num);
Common.c
#include "Common.h"
#define PRIME_TABLE_SIZE 28
// Precomputed primes (each roughly doubling) used as bucket counts.
// Renamed from _PrimeSize/_PrimeList: identifiers beginning with an
// underscore followed by an uppercase letter are reserved for the
// implementation (C11 7.1.3). Table made static const: it is private
// to this translation unit and never modified.
static const unsigned long kPrimeTable[PRIME_TABLE_SIZE] =
{
    53ul, 97ul, 193ul, 389ul, 769ul,
    1543ul, 3079ul, 6151ul, 12289ul, 24593ul,
    49157ul, 98317ul, 196613ul, 393241ul, 786433ul,
    1572869ul, 3145739ul, 6291469ul, 12582917ul, 25165843ul,
    50331653ul, 100663319ul, 201326611ul, 402653189ul, 805306457ul,
    1610612741ul, 3221225473ul, 4294967291ul
};

/*
 * Return the smallest table prime that is >= num.
 * If num exceeds the largest entry, the largest prime is returned
 * (the result may then be smaller than num).
 */
unsigned long GetNextPrime(unsigned long num)
{
    size_t i;
    for (i = 0; i < PRIME_TABLE_SIZE; ++i)
    {
        if (num <= kPrimeTable[i])
            return kPrimeTable[i];
    }
    return kPrimeTable[PRIME_TABLE_SIZE - 1];
}
HashBucket.h
#pragma once
// Open-hashing (separate-chaining) hash bucket: an array of singly
// linked chains, one per bucket.
//
// Fix: the five #include directives below lost their header names
// (angle-bracket text stripped during publishing); restored to the
// standard headers this module actually uses (stdio/stdlib/string/
// assert/stddef -- TODO confirm against the original source).

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <stddef.h>

typedef int K;
typedef int V;

// Key/value pair stored in each chain node.
typedef struct Pair
{
    K _key;
    V _value;
}Pair;

// One node of a bucket's singly linked chain.
typedef struct HashNode
{
    struct HashNode *_pNext;
    Pair _data;
}HashNode, *Node;

// The hash table: array of bucket head pointers plus bookkeeping.
typedef struct HashTable
{
    Node* table;             // array of _capacity bucket heads
    unsigned long _capacity; // bucket count (a prime from Common.c)
    size_t _size;            // number of stored nodes
}HT;

void InitHashBucket(HT *ht, int capacity);
int HashFun(HT *ht, K key);                     // hash function: key -> bucket index
Node BuyNode(K key, V value);                   // allocate a chain node
void checkCapacity(HT *ht);                     // grow the table when full
int InsertHashBucketUnique(HT * ht, K key, V v);// insert; duplicate keys rejected
void InsertHashBucketEqual(HT * ht, K key, V v);// insert; duplicate keys allowed

int DeleteUnique(HT *ht, K key);                // delete the node with this key
int DeleteEqual(HT *ht, K key);                 // delete all nodes with this key
Node FindHashBucket(HT* ht, K key);             // lookup
void PrintHashBucket(HT* ht);                   // dump table contents
void DestroyHashBucket(HT* ht);                 // free all nodes and the table
HashBucket.c
#include "HashBucket.h"
#include "Common.h"


/*
 * Initialize the table with at least `capacity` buckets, rounded up to
 * the next prime from the shared prime table.
 *
 * Fix: on allocation failure the original returned with _size and
 * _capacity uninitialized, so a later Destroy/Insert read garbage.
 * Now the struct is always left in a consistent (empty) state.
 */
void InitHashBucket(HT *ht, int capacity)
{
    assert(ht);
    unsigned long cap = GetNextPrime((unsigned long)capacity);
    // calloc zero-fills the array, so every bucket head starts NULL.
    ht->table = (Node*)calloc(cap, sizeof(Node));
    if (NULL == ht->table)
    {
        ht->_capacity = 0; // consistent empty state on OOM
        ht->_size = 0;
        return;
    }
    ht->_capacity = cap;
    ht->_size = 0;
}

/* Allocate and initialize a detached chain node holding (key, value). */
Node BuyNode(K key, V value)
{
    Node node = (Node)malloc(sizeof(HashNode));
    assert(node); // out of memory is treated as fatal here

    node->_data._key = key;
    node->_data._value = value;
    node->_pNext = NULL;
    return node;
}

/*
 * Map a key to its bucket index: key modulo bucket count.
 * NOTE(review): key (int) is converted to unsigned long by the usual
 * arithmetic conversions before '%', so a negative key becomes a huge
 * unsigned value. The result stays in [0, _capacity), but the mapping
 * for negative keys depends on integer width -- confirm keys are
 * expected to be non-negative. checkCapacity's rehash uses the same
 * raw '%' expression, so the two must stay in sync.
 */
int HashFun(HT *ht, K key)
{
    return key % ht->_capacity;
}

/*
 * Grow the table when the load factor reaches 1 (_size == _capacity),
 * rehashing every node into a larger bucket array by head-insertion.
 *
 * Fixes over the original:
 *  - GetNextPrime(ht->_capacity) returned _capacity itself (it is
 *    already a table prime), so the table never actually grew;
 *    ask for _capacity + 1.
 *  - the new table was malloc'd but its bucket heads were never set
 *    to NULL, so `pcur->_pNext = newtable[BucketNO]` read
 *    uninitialized memory (undefined behavior); use calloc.
 *  - ht->_capacity was never updated after swapping in the new table.
 */
void checkCapacity(HT *ht)
{
    assert(ht);
    if (ht->_size < ht->_capacity)
        return;

    unsigned long newcapacity = GetNextPrime(ht->_capacity + 1);
    Node *newtable = (Node*)calloc(newcapacity, sizeof(Node));
    assert(newtable); // OOM: the program cannot proceed

    unsigned long BucketIdx;
    for (BucketIdx = 0; BucketIdx < ht->_capacity; ++BucketIdx)
    {
        Node pcur = ht->table[BucketIdx];
        while (pcur)
        {
            Node pnext = pcur->_pNext;                          // save before relinking
            unsigned long BucketNO = pcur->_data._key % newcapacity; // new bucket
            pcur->_pNext = newtable[BucketNO];                  // head-insert into new chain
            newtable[BucketNO] = pcur;
            pcur = pnext;
        }
    }
    free(ht->table);
    ht->table = newtable;
    ht->_capacity = newcapacity; // was missing: capacity must track the new table
}

/*
 * Insert (key, v), rejecting duplicate keys.
 * Returns 1 if the pair was inserted, 0 if `key` is already present.
 */
int InsertHashBucketUnique(HT * ht, K key, V v)
{
    assert(ht);

    checkCapacity(ht); // grow first if the table is full

    int bucket = HashFun(ht, key);

    // Scan the chain; bail out if the key already exists.
    Node scan;
    for (scan = ht->table[bucket]; scan != NULL; scan = scan->_pNext)
    {
        if (scan->_data._key == key)
            return 0;
    }

    // Not found: head-insert a fresh node.
    Node fresh = BuyNode(key, v);
    fresh->_pNext = ht->table[bucket];
    ht->table[bucket] = fresh;
    ht->_size++;
    return 1;
}

/* Insert (key, v) at the head of its bucket; duplicate keys are allowed. */
void InsertHashBucketEqual(HT * ht, K key, V v)
{
    assert(ht);

    checkCapacity(ht); // grow first if the table is full

    int bucket = HashFun(ht, key);
    Node fresh = BuyNode(key, v);

    // Head insertion: new node becomes the chain head.
    fresh->_pNext = ht->table[bucket];
    ht->table[bucket] = fresh;
    ht->_size++;
}

/* Return the first node whose key equals `key`, or NULL if absent. */
Node FindHashBucket(HT* ht, K key)
{
    assert(ht);

    Node scan;
    for (scan = ht->table[HashFun(ht, key)]; scan != NULL; scan = scan->_pNext)
    {
        if (scan->_data._key == key)
            return scan;
    }
    return NULL;
}

/*
 * Delete the first node whose key equals `key`.
 * Returns 1 if a node was removed, 0 if the key is absent.
 *
 * Fixes over the original:
 *  - the loop never advanced past a non-matching node, so a key miss
 *    on a non-empty chain spun forever;
 *  - for a non-head match it advanced pcur first and then unlinked and
 *    freed the node AFTER the match instead of the match itself;
 *  - ht->_size was never decremented on removal.
 */
int DeleteUnique(HT *ht, K key)
{
    assert(ht);
    int BucketNo = HashFun(ht, key);
    Node pPre = NULL;
    Node pcur = ht->table[BucketNo];

    while (pcur)
    {
        if (pcur->_data._key == key)
        {
            if (pPre == NULL)
                ht->table[BucketNo] = pcur->_pNext; // match is the chain head
            else
                pPre->_pNext = pcur->_pNext;        // unlink the match itself
            free(pcur);
            ht->_size--;
            return 1;
        }
        pPre = pcur;            // advance: was missing, causing an infinite loop
        pcur = pcur->_pNext;
    }
    return 0; // key not present
}

/*
 * Delete every node whose key equals `key`.
 * Returns 1 if at least one node was removed, 0 otherwise.
 *
 * Fixes over the original: removed the duplicated assert(ht);
 * oldsize is size_t to match ht->_size (was int, a signed/unsigned
 * mismatch).
 */
int DeleteEqual(HT *ht, K key)
{
    assert(ht);
    int BucketNo = HashFun(ht, key);
    size_t oldsize = ht->_size;
    Node pPre = NULL;
    Node pcur = ht->table[BucketNo];
    while (pcur)
    {
        if (pcur->_data._key == key)
        {
            // Unlink and free the match, then continue from its successor.
            Node victim = pcur;
            if (pPre == NULL)
                ht->table[BucketNo] = pcur->_pNext; // match is the chain head
            else
                pPre->_pNext = pcur->_pNext;
            pcur = pcur->_pNext;
            free(victim);
            ht->_size--;
        }
        else
        {
            pPre = pcur;
            pcur = pcur->_pNext;
        }
    }
    return oldsize != ht->_size;
}

/* Free every chain node, then the bucket array; leave the table empty. */
void DestroyHashBucket(HT* ht)
{
    assert(ht);

    unsigned long bucket;
    for (bucket = 0; bucket < ht->_capacity; ++bucket)
    {
        // Walk the chain, saving each successor before freeing.
        Node node = ht->table[bucket];
        while (node)
        {
            Node next = node->_pNext;
            free(node);
            node = next;
        }
        ht->table[bucket] = NULL;
    }
    free(ht->table);
    ht->table = NULL;
    ht->_size = 0;
    ht->_capacity = 0;
}

/*
 * Dump the table: one line per bucket index, then each (key, value)
 * pair chained in that bucket.
 *
 * Fixes over the original:
 *  - the node printf had an empty format string (the "<%d, %d>"
 *    specifiers were likely stripped in publishing), so key/value were
 *    never printed -- and passing extra args to "" is wasted work;
 *  - BucketIdx was unsigned int but printed with %lu (format/argument
 *    type mismatch is undefined behavior); declared unsigned long.
 */
void PrintHashBucket(HT* ht)
{
    assert(ht);
    unsigned long BucketIdx = 0;
    for (; BucketIdx < ht->_capacity; ++BucketIdx)
    {
        Node pcur = ht->table[BucketIdx];
        printf("%lu \n", BucketIdx);
        while (pcur)
        {
            printf("<%d, %d> \n", pcur->_data._key, pcur->_data._value);
            pcur = pcur->_pNext;
        }
    }
}

/*
 * Smoke test: insert (with duplicates allowed), delete one key,
 * look up another, print, then destroy.
 *
 * Fix: the original dereferenced the FindHashBucket result without a
 * NULL check; a failed lookup would crash.
 */
void test()
{
    HT ht;
    InitHashBucket(&ht, 10);
    Node pcur = NULL;
    InsertHashBucketEqual(&ht, 5, 1);
    InsertHashBucketEqual(&ht, 9, 2);
    InsertHashBucketEqual(&ht, 25, 3);
    InsertHashBucketEqual(&ht, 35, 4);
    InsertHashBucketEqual(&ht, 62, 4);
    DeleteUnique(&ht, 35);
    pcur = FindHashBucket(&ht, 25);

    PrintHashBucket(&ht);
    printf("\n");
    if (pcur != NULL) // guard: Find returns NULL when the key is absent
        printf("key = %d , v = %d ", pcur->_data._key, pcur->_data._value);

    DestroyHashBucket(&ht);
}
test.c
/*
 * Entry point. Fix: `void main()` is not a standard signature; the
 * C standard requires `int main(void)` (C11 5.1.2.2.1).
 * NOTE(review): system("pause") is Windows-only and needs <stdlib.h>,
 * and test() needs a visible prototype -- confirm the real test.c
 * includes HashBucket.h and <stdlib.h>.
 */
int main(void)
{
    test();
    system("pause");
    return 0;
}

You may also be interested in: (open-hashing hash tables)